1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 6 #include <petscpkg_version.h> 7 #include <petsc/private/petschypre.h> 8 #include <petscmathypre.h> 9 #include <petsc/private/matimpl.h> 10 #include <../src/mat/impls/hypre/mhypre.h> 11 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 #include <../src/vec/vec/impls/hypre/vhyp.h> 13 #include <HYPRE.h> 14 #include <HYPRE_utilities.h> 15 #include <_hypre_parcsr_ls.h> 16 #include <_hypre_sstruct_ls.h> 17 18 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0) 19 #define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A) 20 #endif 21 22 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*); 23 24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool); 29 static PetscErrorCode hypre_array_destroy(void*); 30 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins); 31 32 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 33 { 34 PetscErrorCode ierr; 35 PetscInt i,n_d,n_o; 36 const PetscInt *ia_d,*ia_o; 37 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 38 HYPRE_Int *nnz_d=NULL,*nnz_o=NULL; 39 40 PetscFunctionBegin; 41 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 42 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 43 if (done_d) { 44 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 45 for (i=0; i<n_d; i++) { 46 nnz_d[i] = ia_d[i+1] - ia_d[i]; 47 } 48 } 49 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 50 } 51 if (A_o) { /* determine 
number of nonzero entries in local off-diagonal part */ 52 ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 53 if (done_o) { 54 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 55 for (i=0; i<n_o; i++) { 56 nnz_o[i] = ia_o[i+1] - ia_o[i]; 57 } 58 } 59 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 60 } 61 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */ 62 if (!done_o) { /* only diagonal part */ 63 ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr); 64 for (i=0; i<n_d; i++) { 65 nnz_o[i] = 0; 66 } 67 } 68 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 69 { /* If we don't do this, the columns of the matrix will be all zeros! */ 70 hypre_AuxParCSRMatrix *aux_matrix; 71 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 72 hypre_AuxParCSRMatrixDestroy(aux_matrix); 73 hypre_IJMatrixTranslator(ij) = NULL; 74 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 75 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 76 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 77 } 78 #else 79 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 80 #endif 81 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 82 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 83 } 84 PetscFunctionReturn(0); 85 } 86 87 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 88 { 89 PetscErrorCode ierr; 90 PetscInt rstart,rend,cstart,cend; 91 92 PetscFunctionBegin; 93 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 94 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 95 rstart = A->rmap->rstart; 96 rend = A->rmap->rend; 97 cstart = A->cmap->rstart; 98 cend = A->cmap->rend; 99 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 100 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 101 { 102 PetscBool same; 103 Mat A_d,A_o; 104 const PetscInt *colmap; 105 ierr = 
PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr); 106 if (same) { 107 ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 108 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 109 PetscFunctionReturn(0); 110 } 111 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr); 112 if (same) { 113 ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 114 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 115 PetscFunctionReturn(0); 116 } 117 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr); 118 if (same) { 119 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 120 PetscFunctionReturn(0); 121 } 122 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 123 if (same) { 124 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 125 PetscFunctionReturn(0); 126 } 127 } 128 PetscFunctionReturn(0); 129 } 130 131 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 132 { 133 PetscErrorCode ierr; 134 PetscInt i,rstart,rend,ncols,nr,nc; 135 const PetscScalar *values; 136 const PetscInt *cols; 137 PetscBool flg; 138 139 PetscFunctionBegin; 140 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 141 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 142 if (flg && nr == nc) { 143 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 144 PetscFunctionReturn(0); 145 } 146 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 147 if (flg) { 148 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 149 PetscFunctionReturn(0); 150 } 151 152 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 153 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 154 for (i=rstart; i<rend; i++) { 155 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 156 if (ncols) { 157 HYPRE_Int nc = (HYPRE_Int)ncols; 158 159 if ((PetscInt)nc 
!= ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i); 160 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values)); 161 } 162 ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 163 } 164 PetscFunctionReturn(0); 165 } 166 167 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij) 168 { 169 PetscErrorCode ierr; 170 Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data; 171 HYPRE_Int type; 172 hypre_ParCSRMatrix *par_matrix; 173 hypre_AuxParCSRMatrix *aux_matrix; 174 hypre_CSRMatrix *hdiag; 175 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 176 177 PetscFunctionBegin; 178 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 179 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 180 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 181 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 182 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 183 /* 184 this is the Hack part where we monkey directly with the hypre datastructures 185 */ 186 if (sameint) { 187 ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr); 188 ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr); 189 } else { 190 PetscInt i; 191 192 for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i]; 193 for (i=0;i<pdiag->nz;i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i]; 194 } 195 ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr); 196 197 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 198 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 199 PetscFunctionReturn(0); 200 } 201 202 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 203 { 204 PetscErrorCode ierr; 205 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 206 Mat_SeqAIJ *pdiag,*poffd; 207 PetscInt i,*garray = 
pA->garray,*jj,cstart,*pjj; 208 HYPRE_Int *hjj,type; 209 hypre_ParCSRMatrix *par_matrix; 210 hypre_AuxParCSRMatrix *aux_matrix; 211 hypre_CSRMatrix *hdiag,*hoffd; 212 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 213 214 PetscFunctionBegin; 215 pdiag = (Mat_SeqAIJ*) pA->A->data; 216 poffd = (Mat_SeqAIJ*) pA->B->data; 217 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 218 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 219 220 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 221 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 222 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 223 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 224 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 225 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 226 227 /* 228 this is the Hack part where we monkey directly with the hypre datastructures 229 */ 230 if (sameint) { 231 ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr); 232 } else { 233 for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]); 234 } 235 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 236 hjj = hdiag->j; 237 pjj = pdiag->j; 238 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 239 for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i]; 240 #else 241 for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i]; 242 #endif 243 ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr); 244 if (sameint) { 245 ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr); 246 } else { 247 for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]); 248 } 249 250 /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this 251 If we hacked a hypre a bit more we might be able to avoid this step */ 252 #if 
PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 253 PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd)); 254 jj = (PetscInt*) hoffd->big_j; 255 #else 256 jj = (PetscInt*) hoffd->j; 257 #endif 258 pjj = poffd->j; 259 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 260 261 ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr); 262 263 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 264 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 265 PetscFunctionReturn(0); 266 } 267 268 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 269 { 270 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 271 Mat lA; 272 ISLocalToGlobalMapping rl2g,cl2g; 273 IS is; 274 hypre_ParCSRMatrix *hA; 275 hypre_CSRMatrix *hdiag,*hoffd; 276 MPI_Comm comm; 277 HYPRE_Complex *hdd,*hod,*aa; 278 PetscScalar *data; 279 HYPRE_BigInt *col_map_offd; 280 HYPRE_Int *hdi,*hdj,*hoi,*hoj; 281 PetscInt *ii,*jj,*iptr,*jptr; 282 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 283 HYPRE_Int type; 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 comm = PetscObjectComm((PetscObject)A); 288 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 289 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 290 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 291 M = hypre_ParCSRMatrixGlobalNumRows(hA); 292 N = hypre_ParCSRMatrixGlobalNumCols(hA); 293 str = hypre_ParCSRMatrixFirstRowIndex(hA); 294 stc = hypre_ParCSRMatrixFirstColDiag(hA); 295 hdiag = hypre_ParCSRMatrixDiag(hA); 296 hoffd = hypre_ParCSRMatrixOffd(hA); 297 dr = hypre_CSRMatrixNumRows(hdiag); 298 dc = hypre_CSRMatrixNumCols(hdiag); 299 nnz = hypre_CSRMatrixNumNonzeros(hdiag); 300 hdi = hypre_CSRMatrixI(hdiag); 301 hdj = hypre_CSRMatrixJ(hdiag); 302 hdd = hypre_CSRMatrixData(hdiag); 303 oc = hypre_CSRMatrixNumCols(hoffd); 304 nnz += hypre_CSRMatrixNumNonzeros(hoffd); 305 hoi = hypre_CSRMatrixI(hoffd); 306 hoj = 
hypre_CSRMatrixJ(hoffd); 307 hod = hypre_CSRMatrixData(hoffd); 308 if (reuse != MAT_REUSE_MATRIX) { 309 PetscInt *aux; 310 311 /* generate l2g maps for rows and cols */ 312 ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr); 313 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 314 ierr = ISDestroy(&is);CHKERRQ(ierr); 315 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA); 316 ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr); 317 for (i=0; i<dc; i++) aux[i] = i+stc; 318 for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i]; 319 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 320 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 321 ierr = ISDestroy(&is);CHKERRQ(ierr); 322 /* create MATIS object */ 323 ierr = MatCreate(comm,B);CHKERRQ(ierr); 324 ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr); 325 ierr = MatSetType(*B,MATIS);CHKERRQ(ierr); 326 ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr); 327 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 328 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 329 330 /* allocate CSR for local matrix */ 331 ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr); 332 ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr); 333 ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr); 334 } else { 335 PetscInt nr; 336 PetscBool done; 337 ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr); 338 ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr); 339 if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr); 340 if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! 
reuse %D requested %D",iptr[nr],nnz); 341 ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr); 342 } 343 /* merge local matrices */ 344 ii = iptr; 345 jj = jptr; 346 aa = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 347 *ii = *(hdi++) + *(hoi++); 348 for (jd=0,jo=0,cum=0; *ii<nnz; cum++) { 349 PetscScalar *aold = (PetscScalar*)aa; 350 PetscInt *jold = jj,nc = jd+jo; 351 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; } 352 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } 353 *(++ii) = *(hdi++) + *(hoi++); 354 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr); 355 } 356 for (; cum<dr; cum++) *(++ii) = nnz; 357 if (reuse != MAT_REUSE_MATRIX) { 358 Mat_SeqAIJ* a; 359 360 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr); 361 ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr); 362 /* hack SeqAIJ */ 363 a = (Mat_SeqAIJ*)(lA->data); 364 a->free_a = PETSC_TRUE; 365 a->free_ij = PETSC_TRUE; 366 ierr = MatDestroy(&lA);CHKERRQ(ierr); 367 } 368 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 369 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 370 if (reuse == MAT_INPLACE_MATRIX) { 371 ierr = MatHeaderReplace(A,B);CHKERRQ(ierr); 372 } 373 PetscFunctionReturn(0); 374 } 375 376 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B) 377 { 378 Mat M = NULL; 379 Mat_HYPRE *hB; 380 MPI_Comm comm = PetscObjectComm((PetscObject)A); 381 PetscErrorCode ierr; 382 383 PetscFunctionBegin; 384 if (reuse == MAT_REUSE_MATRIX) { 385 /* always destroy the old matrix and create a new memory; 386 hope this does not churn the memory too much. The problem 387 is I do not know if it is possible to put the matrix back to 388 its initial state so that we can directly copy the values 389 the second time through. 
*/ 390 hB = (Mat_HYPRE*)((*B)->data); 391 PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij)); 392 } else { 393 ierr = MatCreate(comm,&M);CHKERRQ(ierr); 394 ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr); 395 ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 396 hB = (Mat_HYPRE*)(M->data); 397 if (reuse == MAT_INITIAL_MATRIX) *B = M; 398 } 399 ierr = MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 400 ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr); 401 ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr); 402 if (reuse == MAT_INPLACE_MATRIX) { 403 ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr); 404 } 405 (*B)->preallocated = PETSC_TRUE; 406 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 407 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 408 PetscFunctionReturn(0); 409 } 410 411 static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B) 412 { 413 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 414 hypre_ParCSRMatrix *parcsr; 415 hypre_CSRMatrix *hdiag,*hoffd; 416 MPI_Comm comm; 417 PetscScalar *da,*oa,*aptr; 418 PetscInt *dii,*djj,*oii,*ojj,*iptr; 419 PetscInt i,dnnz,onnz,m,n; 420 HYPRE_Int type; 421 PetscMPIInt size; 422 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 423 PetscErrorCode ierr; 424 425 PetscFunctionBegin; 426 comm = PetscObjectComm((PetscObject)A); 427 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 428 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 429 if (reuse == MAT_REUSE_MATRIX) { 430 PetscBool ismpiaij,isseqaij; 431 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 432 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 433 if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported"); 434 } 435 ierr = 
MPI_Comm_size(comm,&size);CHKERRQ(ierr); 436 437 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr)); 438 hdiag = hypre_ParCSRMatrixDiag(parcsr); 439 hoffd = hypre_ParCSRMatrixOffd(parcsr); 440 m = hypre_CSRMatrixNumRows(hdiag); 441 n = hypre_CSRMatrixNumCols(hdiag); 442 dnnz = hypre_CSRMatrixNumNonzeros(hdiag); 443 onnz = hypre_CSRMatrixNumNonzeros(hoffd); 444 if (reuse == MAT_INITIAL_MATRIX) { 445 ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr); 446 ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr); 447 ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr); 448 } else if (reuse == MAT_REUSE_MATRIX) { 449 PetscInt nr; 450 PetscBool done; 451 if (size > 1) { 452 Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data); 453 454 ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr); 455 if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m); 456 if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz); 457 ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr); 458 } else { 459 ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr); 460 if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m); 461 if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! 
reuse %D hypre %D",dii[nr],dnnz); 462 ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr); 463 } 464 } else { /* MAT_INPLACE_MATRIX */ 465 if (!sameint) { 466 ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr); 467 ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr); 468 } else { 469 dii = (PetscInt*)hypre_CSRMatrixI(hdiag); 470 djj = (PetscInt*)hypre_CSRMatrixJ(hdiag); 471 } 472 da = (PetscScalar*)hypre_CSRMatrixData(hdiag); 473 } 474 475 if (!sameint) { 476 for (i=0;i<m+1;i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]); 477 for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]); 478 } else { 479 ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr); 480 ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr); 481 } 482 ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr); 483 iptr = djj; 484 aptr = da; 485 for (i=0; i<m; i++) { 486 PetscInt nc = dii[i+1]-dii[i]; 487 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr); 488 iptr += nc; 489 aptr += nc; 490 } 491 if (size > 1) { 492 HYPRE_BigInt *coffd; 493 HYPRE_Int *offdj; 494 495 if (reuse == MAT_INITIAL_MATRIX) { 496 ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr); 497 ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr); 498 ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr); 499 } else if (reuse == MAT_REUSE_MATRIX) { 500 Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data); 501 PetscInt nr,hr = hypre_CSRMatrixNumRows(hoffd); 502 PetscBool done; 503 504 ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr); 505 if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr); 506 if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! 
reuse %D hypre %D",oii[nr],onnz); 507 ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr); 508 } else { /* MAT_INPLACE_MATRIX */ 509 if (!sameint) { 510 ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr); 511 ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr); 512 } else { 513 oii = (PetscInt*)hypre_CSRMatrixI(hoffd); 514 ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd); 515 } 516 oa = (PetscScalar*)hypre_CSRMatrixData(hoffd); 517 } 518 if (!sameint) { 519 for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]); 520 } else { 521 ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr); 522 } 523 offdj = hypre_CSRMatrixJ(hoffd); 524 coffd = hypre_ParCSRMatrixColMapOffd(parcsr); 525 for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]]; 526 ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr); 527 iptr = ojj; 528 aptr = oa; 529 for (i=0; i<m; i++) { 530 PetscInt nc = oii[i+1]-oii[i]; 531 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr); 532 iptr += nc; 533 aptr += nc; 534 } 535 if (reuse == MAT_INITIAL_MATRIX) { 536 Mat_MPIAIJ *b; 537 Mat_SeqAIJ *d,*o; 538 539 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr); 540 /* hack MPIAIJ */ 541 b = (Mat_MPIAIJ*)((*B)->data); 542 d = (Mat_SeqAIJ*)b->A->data; 543 o = (Mat_SeqAIJ*)b->B->data; 544 d->free_a = PETSC_TRUE; 545 d->free_ij = PETSC_TRUE; 546 o->free_a = PETSC_TRUE; 547 o->free_ij = PETSC_TRUE; 548 } else if (reuse == MAT_INPLACE_MATRIX) { 549 Mat T; 550 551 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr); 552 if (sameint) { /* ownership of CSR pointers is transferred to PETSc */ 553 hypre_CSRMatrixI(hdiag) = NULL; 554 hypre_CSRMatrixJ(hdiag) = NULL; 555 hypre_CSRMatrixI(hoffd) = NULL; 556 hypre_CSRMatrixJ(hoffd) = NULL; 557 } else { /* Hack MPIAIJ -> free ij but not a */ 558 Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data); 559 Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data); 560 
Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data); 561 562 d->free_ij = PETSC_TRUE; 563 o->free_ij = PETSC_TRUE; 564 } 565 hypre_CSRMatrixData(hdiag) = NULL; 566 hypre_CSRMatrixData(hoffd) = NULL; 567 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 568 } 569 } else { 570 oii = NULL; 571 ojj = NULL; 572 oa = NULL; 573 if (reuse == MAT_INITIAL_MATRIX) { 574 Mat_SeqAIJ* b; 575 576 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr); 577 /* hack SeqAIJ */ 578 b = (Mat_SeqAIJ*)((*B)->data); 579 b->free_a = PETSC_TRUE; 580 b->free_ij = PETSC_TRUE; 581 } else if (reuse == MAT_INPLACE_MATRIX) { 582 Mat T; 583 584 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr); 585 if (sameint) { /* ownership of CSR pointers is transferred to PETSc */ 586 hypre_CSRMatrixI(hdiag) = NULL; 587 hypre_CSRMatrixJ(hdiag) = NULL; 588 } else { /* free ij but not a */ 589 Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data); 590 591 b->free_ij = PETSC_TRUE; 592 } 593 hypre_CSRMatrixData(hdiag) = NULL; 594 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 595 } 596 } 597 598 /* we have to use hypre_Tfree to free the HYPRE arrays 599 that PETSc now onws */ 600 if (reuse == MAT_INPLACE_MATRIX) { 601 PetscInt nh; 602 void *ptrs[6] = {da,oa,dii,djj,oii,ojj}; 603 const char *names[6] = {"_hypre_csr_da", 604 "_hypre_csr_oa", 605 "_hypre_csr_dii", 606 "_hypre_csr_djj", 607 "_hypre_csr_oii", 608 "_hypre_csr_ojj"}; 609 nh = sameint ? 
6 : 2; 610 for (i=0; i<nh; i++) { 611 PetscContainer c; 612 613 ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr); 614 ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr); 615 ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr); 616 ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr); 617 ierr = PetscContainerDestroy(&c);CHKERRQ(ierr); 618 } 619 } 620 PetscFunctionReturn(0); 621 } 622 623 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 624 { 625 hypre_ParCSRMatrix *tA; 626 hypre_CSRMatrix *hdiag,*hoffd; 627 Mat_SeqAIJ *diag,*offd; 628 PetscInt *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts; 629 MPI_Comm comm = PetscObjectComm((PetscObject)A); 630 PetscBool ismpiaij,isseqaij; 631 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 632 PetscErrorCode ierr; 633 634 PetscFunctionBegin; 635 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 636 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 637 if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name); 638 if (ismpiaij) { 639 Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data); 640 641 diag = (Mat_SeqAIJ*)a->A->data; 642 offd = (Mat_SeqAIJ*)a->B->data; 643 garray = a->garray; 644 noffd = a->B->cmap->N; 645 dnnz = diag->nz; 646 onnz = offd->nz; 647 } else { 648 diag = (Mat_SeqAIJ*)A->data; 649 offd = NULL; 650 garray = NULL; 651 noffd = 0; 652 dnnz = diag->nz; 653 onnz = 0; 654 } 655 656 /* create a temporary ParCSR */ 657 if (HYPRE_AssumedPartitionCheck()) { 658 PetscMPIInt myid; 659 660 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 661 row_starts = A->rmap->range + myid; 662 col_starts = A->cmap->range + myid; 663 } else { 664 row_starts = A->rmap->range; 665 col_starts = A->cmap->range; 666 } 667 tA = 
hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz); 668 hypre_ParCSRMatrixSetRowStartsOwner(tA,0); 669 hypre_ParCSRMatrixSetColStartsOwner(tA,0); 670 671 /* set diagonal part */ 672 hdiag = hypre_ParCSRMatrixDiag(tA); 673 if (sameint) { /* reuse CSR pointers */ 674 hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i; 675 hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j; 676 } else { /* malloc CSR pointers */ 677 HYPRE_Int *hi,*hj; 678 679 ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr); 680 for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]); 681 for (i = 0; i < dnnz; i++) hj[i] = (HYPRE_Int)(diag->j[i]); 682 hypre_CSRMatrixI(hdiag) = hi; 683 hypre_CSRMatrixJ(hdiag) = hj; 684 } 685 hypre_CSRMatrixData(hdiag) = (HYPRE_Complex*)diag->a; 686 hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz; 687 hypre_CSRMatrixSetRownnz(hdiag); 688 hypre_CSRMatrixSetDataOwner(hdiag,0); 689 690 /* set offdiagonal part */ 691 hoffd = hypre_ParCSRMatrixOffd(tA); 692 if (offd) { 693 if (sameint) { /* reuse CSR pointers */ 694 hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i; 695 hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j; 696 } else { /* malloc CSR pointers */ 697 HYPRE_Int *hi,*hj; 698 699 ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr); 700 for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]); 701 for (i = 0; i < onnz; i++) hj[i] = (HYPRE_Int)(offd->j[i]); 702 hypre_CSRMatrixI(hoffd) = hi; 703 hypre_CSRMatrixJ(hoffd) = hj; 704 } 705 hypre_CSRMatrixData(hoffd) = (HYPRE_Complex*)offd->a; 706 hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz; 707 hypre_CSRMatrixSetRownnz(hoffd); 708 hypre_CSRMatrixSetDataOwner(hoffd,0); 709 hypre_ParCSRMatrixSetNumNonzeros(tA); 710 hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray; 711 } 712 *hA = tA; 713 PetscFunctionReturn(0); 714 } 715 716 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 717 { 718 
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc: when PetscInt and HYPRE_Int differ in size,
     the i/j index arrays were PETSc-side copies (PetscFree2 pairs with a PetscMalloc2 elsewhere) */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi   = hypre_CSRMatrixI(hdiag);
    hj   = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi   = hypre_CSRMatrixI(hoffd);
      hj   = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA, so hypre does not free PETSc-owned storage */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember ownership flags: the hypre call below steals col_starts from P and row_starts from R */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  /* restore the original ownership on P and R */
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

/* numeric phase of C = P^T*A*P for AIJ matrices, computed through hypre's RAP kernel;
   the result is merged into the caller-provided matrix header C */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase: only creates the output header and installs the numeric routine;
   hypre computes the whole product during the numeric phase, so fill is unused here */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

/* numeric phase of C = P^T*A*P with A of type AIJ and P of type MATHYPRE */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C; output type follows C's current type */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for A AIJ, P MATHYPRE; the output matrix type is selectable via
   the -matptap_hypre_outtype option (default MATAIJ) */
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* numeric phase of C = P^T*A*P with both A and P of type MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for A and P both MATHYPRE */
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have
   the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* numeric phase of C = A*B for AIJ matrices, computed by hypre_ParMatmul */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase: only creates the output header; hypre does all the work in the numeric phase */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

/* numeric phase of C = A*B with both operands of type MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* HeaderReplace wiped the ops table of C; reinstall the numeric routine */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}

/* MatMult driver for A and B both MATHYPRE */
static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* numeric phase of D = A^T*B*C for AIJ matrices, computed through hypre's RAP kernel */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase: only creates the output header */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = A * x + y */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = A^T * x + y */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  /* temporarily point hypre's work vectors at the PETSc arrays (sax/say keep the
     originals so they can be restored); for the transpose product the roles of the
     two work vectors are swapped, since hA->x has column layout and hA->b row layout */
  if (trans) {
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* destroys the MATHYPRE data structure: hypre objects, communicator, stash,
   scratch array, and the composed method slots */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b)
PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if the inner ParCSR is not ours to free, detach it before hypre destroys the IJ matrix */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the composed methods installed at creation */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaijmkl_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* default setup: preallocate with default (PETSC_DEFAULT) row lengths */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* finishes assembly: flushes stashed off-process entries, assembles the hypre IJ matrix,
   recreates the auxiliary translator when needed, and lazily creates the matvec work vectors */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscScalar    *val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* drain every message of stashed (off-process) entries */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* work vectors for MatMult are created once, on the first assembly */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* hands out the internal scratch buffer (at least size bytes, grown on demand);
   only one user at a time — MatRestoreArray_HYPRE releases it */
static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

  if (hA->size >= size) {
    *array = hA->array;
  } else {
    ierr = PetscFree(hA->array);CHKERRQ(ierr);
    hA->size = size;
    ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
    *array = hA->array;
  }

  hA->available = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* releases the scratch buffer obtained with MatGetArray_HYPRE (the memory is kept for reuse) */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  *array        = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* inserts/adds a logically dense block of values; negative row/column indices are
   ignored, off-process rows are stashed (INSERT_VALUES only) for MatAssemblyEnd.
   NOTE(review): the (HYPRE_BigInt*) casts below assume PetscInt and HYPRE_BigInt
   have the same size — confirm against the configure-time checks */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA   = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  HYPRE_Complex  *sscr;
  PetscInt       *cscr[2];
  PetscInt       i,nzc;
  void           *array = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scratch layout: nc compressed column indices | nc original positions | nc*nr converted values */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr    = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  /* compress out negative (to be ignored) column indices */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr =
MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1255 PetscFunctionReturn(0); 1256 } 1257 1258 if (ins == ADD_VALUES) { 1259 for (i=0;i<nr;i++) { 1260 if (rows[i] >= 0 && nzc) { 1261 PetscInt j; 1262 HYPRE_Int hnc = (HYPRE_Int)nzc; 1263 1264 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1265 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1266 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1267 } 1268 vals += nc; 1269 } 1270 } else { /* INSERT_VALUES */ 1271 PetscInt rst,ren; 1272 1273 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1274 for (i=0;i<nr;i++) { 1275 if (rows[i] >= 0 && nzc) { 1276 PetscInt j; 1277 HYPRE_Int hnc = (HYPRE_Int)nzc; 1278 1279 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1280 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1281 /* nonlocal values */ 1282 if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); } 1283 /* local values */ 1284 else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1285 } 1286 vals += nc; 1287 } 1288 } 1289 1290 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1291 PetscFunctionReturn(0); 1292 } 1293 1294 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1295 { 1296 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1297 HYPRE_Int *hdnnz,*honnz; 1298 PetscInt i,rs,re,cs,ce,bs; 1299 PetscMPIInt size; 1300 PetscErrorCode ierr; 1301 1302 PetscFunctionBegin; 1303 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1304 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1305 ierr = 
PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs = A->rmap->rstart;
  re = A->rmap->rend;
  cs = A->cmap->rstart;
  ce = A->cmap->rend;
  if (!hA->ij) {
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    /* an IJ matrix already exists (e.g. repeated preallocation): verify its local
       layout matches the PETSc layouts */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  /* NOTE(review): the (HYPRE_Int*) casts of dnnz/onnz below assume PetscInt and
     HYPRE_Int have the same size — confirm against the configure-time checks */
  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else {
      honnz = (HYPRE_Int*)onnz;
    }
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    /* sequential case: diagonal/off-diagonal split does not apply */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
   If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  parcsr   - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.
 A - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: rend/cend are exclusive upper bounds, hypre's are inclusive */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: a MATHYPRE result with PETSC_COPY_VALUES
     gets a deep copy (clone structure, then copy the values) */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* returns the inner hypre_ParCSRMatrix of a MATHYPRE matrix (no copy) */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameters:
+  A - the MATHYPRE object

   Output Parameter:
.  parcsr - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* reports the first local row (if any) whose diagonal block has no stored diagonal entry */
static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscInt           rst;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
  ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (missing) *missing = PETSC_FALSE;
  if (dd) *dd = -1;
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt  size,i;
    HYPRE_Int *ii,*jj;

    size = hypre_CSRMatrixNumRows(ha);
    ii   = hypre_CSRMatrixI(ha);
    jj   = hypre_CSRMatrixJ(ha);
    for (i = 0; i < size; i++) {
      PetscInt  j;
      PetscBool found = PETSC_FALSE;

      /* scan row i of the diagonal block for a column index equal to i */
      for (j = ii[i]; j < ii[i+1] && !found; j++)
        found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

      if (!found) {
        PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
        if (missing) *missing = PETSC_TRUE;
        if (dd) *dd = i+rst;
        PetscFunctionReturn(0);
      }
    }
    if (!size) {
      PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
      if (missing) *missing = PETSC_TRUE;
      if (dd) *dd = rst;
    }
  } else {
    PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
    if (missing) *missing = PETSC_TRUE;
    if (dd) *dd = rst;
  }
  PetscFunctionReturn(0);
}

/* A = s * A, applied to both the diagonal and off-diagonal CSR blocks */
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscErrorCode     ierr;
  HYPRE_Complex      hs;

  PetscFunctionBegin;
  ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;  /* ii[size] = total nonzeros in the block */
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  PetscFunctionReturn(0);
}

/* zeroes the given (local-only) rows and columns via hypre's eliminate kernel.
   NOTE(review): the diag argument is never used here, so a nonzero diag requested
   by the caller is silently ignored — confirm callers only pass 0.0 */
static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          *lrows;
  PetscInt           rst,ren,i;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
  /* convert global row indices to local ones; off-process rows are not supported */
  for (i=0;i<numRows;i++) {
    if (rows[i] < rst || rows[i] >= ren)
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* zeroes all stored values of one CSR block (NULL block is a no-op) */
static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ha) {
    HYPRE_Int     *ii, size;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);

    if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

/* zeroes all stored values of the matrix, keeping its nonzero structure */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* zeroes the listed rows of one CSR block, setting diag on the diagonal entry */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
{
  PetscInt      ii;
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(0);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);

  for (ii = 0; ii < N; ii++) {
    HYPRE_Int jj,
ibeg, iend, irow; 1736 1737 irow = rows[ii]; 1738 ibeg = i[irow]; 1739 iend = i[irow+1]; 1740 for (jj = ibeg; jj < iend; jj++) 1741 if (j[jj] == irow) a[jj] = diag; 1742 else a[jj] = 0.0; 1743 } 1744 PetscFunctionReturn(0); 1745 } 1746 1747 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1748 { 1749 hypre_ParCSRMatrix *parcsr; 1750 PetscInt *lrows,len; 1751 HYPRE_Complex hdiag; 1752 PetscErrorCode ierr; 1753 1754 PetscFunctionBegin; 1755 if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size"); 1756 ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr); 1757 /* retrieve the internal matrix */ 1758 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1759 /* get locally owned rows */ 1760 ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr); 1761 /* zero diagonal part */ 1762 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr); 1763 /* zero off-diagonal part */ 1764 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr); 1765 1766 ierr = PetscFree(lrows);CHKERRQ(ierr); 1767 PetscFunctionReturn(0); 1768 } 1769 1770 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode) 1771 { 1772 PetscErrorCode ierr; 1773 1774 PetscFunctionBegin; 1775 if (mat->nooffprocentries) PetscFunctionReturn(0); 1776 1777 ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 1778 PetscFunctionReturn(0); 1779 } 1780 1781 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1782 { 1783 hypre_ParCSRMatrix *parcsr; 1784 HYPRE_Int hnz; 1785 PetscErrorCode ierr; 1786 1787 PetscFunctionBegin; 1788 /* retrieve the internal matrix */ 1789 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1790 /* call HYPRE API */ 1791 
PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1792 if (nz) *nz = (PetscInt)hnz; 1793 PetscFunctionReturn(0); 1794 } 1795 1796 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1797 { 1798 hypre_ParCSRMatrix *parcsr; 1799 HYPRE_Int hnz; 1800 PetscErrorCode ierr; 1801 1802 PetscFunctionBegin; 1803 /* retrieve the internal matrix */ 1804 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1805 /* call HYPRE API */ 1806 hnz = nz ? (HYPRE_Int)(*nz) : 0; 1807 PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1808 PetscFunctionReturn(0); 1809 } 1810 1811 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 1812 { 1813 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1814 PetscInt i; 1815 1816 PetscFunctionBegin; 1817 if (!m || !n) PetscFunctionReturn(0); 1818 /* Ignore negative row indices 1819 * And negative column indices should be automatically ignored in hypre 1820 * */ 1821 for (i=0; i<m; i++) { 1822 if (idxm[i] >= 0) { 1823 HYPRE_Int hn = (HYPRE_Int)n; 1824 PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n))); 1825 } 1826 } 1827 PetscFunctionReturn(0); 1828 } 1829 1830 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg) 1831 { 1832 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1833 1834 PetscFunctionBegin; 1835 switch (op) { 1836 case MAT_NO_OFF_PROC_ENTRIES: 1837 if (flg) { 1838 PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0)); 1839 } 1840 break; 1841 case MAT_SORTED_FULL: 1842 hA->sorted_full = flg; 1843 break; 1844 default: 1845 break; 1846 } 1847 PetscFunctionReturn(0); 1848 } 1849 1850 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view) 1851 { 1852 hypre_ParCSRMatrix *parcsr; 1853 PetscErrorCode 
ierr; 1854 Mat B; 1855 PetscViewerFormat format; 1856 PetscErrorCode (*mview)(Mat,PetscViewer) = NULL; 1857 1858 PetscFunctionBegin; 1859 ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr); 1860 if (format != PETSC_VIEWER_NATIVE) { 1861 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1862 ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr); 1863 ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr); 1864 if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr); 1865 ierr = (*mview)(B,view);CHKERRQ(ierr); 1866 ierr = MatDestroy(&B);CHKERRQ(ierr); 1867 } else { 1868 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1869 PetscMPIInt size; 1870 PetscBool isascii; 1871 const char *filename; 1872 1873 /* HYPRE uses only text files */ 1874 ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1875 if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name); 1876 ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr); 1877 PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); 1878 ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr); 1879 if (size > 1) { 1880 ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... 
%s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr); 1881 } else { 1882 ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr); 1883 } 1884 } 1885 PetscFunctionReturn(0); 1886 } 1887 1888 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B) 1889 { 1890 hypre_ParCSRMatrix *parcsr; 1891 PetscErrorCode ierr; 1892 PetscCopyMode cpmode; 1893 1894 PetscFunctionBegin; 1895 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1896 if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) { 1897 parcsr = hypre_ParCSRMatrixClone(parcsr,0); 1898 cpmode = PETSC_OWN_POINTER; 1899 } else { 1900 cpmode = PETSC_COPY_VALUES; 1901 } 1902 ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr); 1903 PetscFunctionReturn(0); 1904 } 1905 1906 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 1907 { 1908 hypre_ParCSRMatrix *acsr,*bcsr; 1909 PetscErrorCode ierr; 1910 1911 PetscFunctionBegin; 1912 if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 1913 ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr); 1914 ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr); 1915 PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); 1916 ierr = MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 1917 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1918 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1919 } else { 1920 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1921 } 1922 PetscFunctionReturn(0); 1923 } 1924 1925 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d) 1926 { 1927 hypre_ParCSRMatrix *parcsr; 1928 hypre_CSRMatrix *dmat; 1929 HYPRE_Complex *a; 1930 HYPRE_Complex *data = NULL; 1931 HYPRE_Int *diag = NULL; 1932 PetscInt i; 1933 PetscBool cong; 1934 PetscErrorCode ierr; 1935 1936 PetscFunctionBegin; 1937 ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr); 1938 if (!cong) 
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns"); 1939 #if defined(PETSC_USE_DEBUG) 1940 { 1941 PetscBool miss; 1942 ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr); 1943 if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing"); 1944 } 1945 #endif 1946 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1947 dmat = hypre_ParCSRMatrixDiag(parcsr); 1948 if (dmat) { 1949 /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 1950 ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 1951 diag = hypre_CSRMatrixI(dmat); 1952 data = hypre_CSRMatrixData(dmat); 1953 for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]]; 1954 ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 1955 } 1956 PetscFunctionReturn(0); 1957 } 1958 1959 #include <petscblaslapack.h> 1960 1961 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str) 1962 { 1963 PetscErrorCode ierr; 1964 1965 PetscFunctionBegin; 1966 if (str == SAME_NONZERO_PATTERN) { 1967 hypre_ParCSRMatrix *x,*y; 1968 hypre_CSRMatrix *xloc,*yloc; 1969 PetscInt xnnz,ynnz; 1970 HYPRE_Complex *xarr,*yarr; 1971 PetscBLASInt one=1,bnz; 1972 1973 ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr); 1974 ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr); 1975 1976 /* diagonal block */ 1977 xloc = hypre_ParCSRMatrixDiag(x); 1978 yloc = hypre_ParCSRMatrixDiag(y); 1979 xnnz = 0; 1980 ynnz = 0; 1981 xarr = NULL; 1982 yarr = NULL; 1983 if (xloc) { 1984 xarr = hypre_CSRMatrixData(xloc); 1985 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 1986 } 1987 if (yloc) { 1988 yarr = hypre_CSRMatrixData(yloc); 1989 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 1990 } 1991 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz); 1992 ierr = 
PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 1993 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 1994 1995 /* off-diagonal block */ 1996 xloc = hypre_ParCSRMatrixOffd(x); 1997 yloc = hypre_ParCSRMatrixOffd(y); 1998 xnnz = 0; 1999 ynnz = 0; 2000 xarr = NULL; 2001 yarr = NULL; 2002 if (xloc) { 2003 xarr = hypre_CSRMatrixData(xloc); 2004 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2005 } 2006 if (yloc) { 2007 yarr = hypre_CSRMatrixData(yloc); 2008 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2009 } 2010 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz); 2011 ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2012 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2013 } else if (str == SUBSET_NONZERO_PATTERN) { 2014 ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 2015 } else { 2016 Mat B; 2017 2018 ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr); 2019 ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 2020 ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr); 2021 } 2022 PetscFunctionReturn(0); 2023 } 2024 2025 /*MC 2026 MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices 2027 based on the hypre IJ interface. 
2028 2029 Level: intermediate 2030 2031 .seealso: MatCreate() 2032 M*/ 2033 2034 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B) 2035 { 2036 Mat_HYPRE *hB; 2037 PetscErrorCode ierr; 2038 2039 PetscFunctionBegin; 2040 ierr = PetscNewLog(B,&hB);CHKERRQ(ierr); 2041 hB->inner_free = PETSC_TRUE; 2042 hB->available = PETSC_TRUE; 2043 hB->sorted_full= PETSC_FALSE; /* no assumption whether column indices are sorted or not */ 2044 hB->size = 0; 2045 hB->array = NULL; 2046 2047 B->data = (void*)hB; 2048 B->rmap->bs = 1; 2049 B->assembled = PETSC_FALSE; 2050 2051 ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2052 B->ops->mult = MatMult_HYPRE; 2053 B->ops->multtranspose = MatMultTranspose_HYPRE; 2054 B->ops->multadd = MatMultAdd_HYPRE; 2055 B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE; 2056 B->ops->setup = MatSetUp_HYPRE; 2057 B->ops->destroy = MatDestroy_HYPRE; 2058 B->ops->assemblyend = MatAssemblyEnd_HYPRE; 2059 B->ops->assemblybegin = MatAssemblyBegin_HYPRE; 2060 B->ops->ptap = MatPtAP_HYPRE_HYPRE; 2061 B->ops->matmult = MatMatMult_HYPRE_HYPRE; 2062 B->ops->setvalues = MatSetValues_HYPRE; 2063 B->ops->missingdiagonal = MatMissingDiagonal_HYPRE; 2064 B->ops->scale = MatScale_HYPRE; 2065 B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE; 2066 B->ops->zeroentries = MatZeroEntries_HYPRE; 2067 B->ops->zerorows = MatZeroRows_HYPRE; 2068 B->ops->getrow = MatGetRow_HYPRE; 2069 B->ops->restorerow = MatRestoreRow_HYPRE; 2070 B->ops->getvalues = MatGetValues_HYPRE; 2071 B->ops->setoption = MatSetOption_HYPRE; 2072 B->ops->duplicate = MatDuplicate_HYPRE; 2073 B->ops->copy = MatCopy_HYPRE; 2074 B->ops->view = MatView_HYPRE; 2075 B->ops->getdiagonal = MatGetDiagonal_HYPRE; 2076 B->ops->axpy = MatAXPY_HYPRE; 2077 2078 /* build cache for off array entries formed */ 2079 ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr); 2080 2081 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr); 2082 ierr = 
PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr); 2083 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr); 2084 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr); 2085 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 2086 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaijmkl_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 2087 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 2088 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr); 2089 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr); 2090 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr); 2091 PetscFunctionReturn(0); 2092 } 2093 2094 static PetscErrorCode hypre_array_destroy(void *ptr) 2095 { 2096 PetscFunctionBegin; 2097 hypre_TFree(ptr,HYPRE_MEMORY_HOST); 2098 PetscFunctionReturn(0); 2099 } 2100