1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 #include <petscsys.h> 6 #include <petsc/private/matimpl.h> 7 #include <../src/mat/impls/hypre/mhypre.h> 8 #include <../src/mat/impls/aij/mpi/mpiaij.h> 9 #include <../src/vec/vec/impls/hypre/vhyp.h> 10 #include <HYPRE.h> 11 #include <HYPRE_utilities.h> 12 #include <_hypre_parcsr_ls.h> 13 14 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 15 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 16 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 17 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 18 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool); 19 20 #undef __FUNCT__ 21 #define __FUNCT__ "MatHYPRE_IJMatrixPreallocate" 22 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 23 { 24 PetscErrorCode ierr; 25 PetscInt i,n_d,n_o; 26 const PetscInt *ia_d,*ia_o; 27 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 28 PetscInt *nnz_d=NULL,*nnz_o=NULL; 29 30 PetscFunctionBegin; 31 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 32 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 33 if (done_d) { 34 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 35 for (i=0; i<n_d; i++) { 36 nnz_d[i] = ia_d[i+1] - ia_d[i]; 37 } 38 } 39 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 40 } 41 if (A_o) { /* determine number of nonzero entries in local off-diagonal part */ 42 ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 43 if (done_o) { 44 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 45 for (i=0; i<n_o; i++) { 46 nnz_o[i] = ia_o[i+1] - ia_o[i]; 47 } 48 } 49 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 50 } 51 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */ 52 if (!done_o) { /* 
only diagonal part */ 53 ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr); 54 for (i=0; i<n_d; i++) { 55 nnz_o[i] = 0; 56 } 57 } 58 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o)); 59 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 60 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 61 } 62 PetscFunctionReturn(0); 63 } 64 65 #undef __FUNCT__ 66 #define __FUNCT__ "MatHYPRE_CreateFromMat" 67 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 68 { 69 PetscErrorCode ierr; 70 PetscInt rstart,rend,cstart,cend; 71 72 PetscFunctionBegin; 73 ierr = MatSetUp(A);CHKERRQ(ierr); 74 rstart = A->rmap->rstart; 75 rend = A->rmap->rend; 76 cstart = A->cmap->rstart; 77 cend = A->cmap->rend; 78 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 79 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 80 { 81 PetscBool same; 82 Mat A_d,A_o; 83 const PetscInt *colmap; 84 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr); 85 if (same) { 86 ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 87 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 88 PetscFunctionReturn(0); 89 } 90 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr); 91 if (same) { 92 ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 93 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 94 PetscFunctionReturn(0); 95 } 96 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr); 97 if (same) { 98 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 99 PetscFunctionReturn(0); 100 } 101 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 102 if (same) { 103 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 104 PetscFunctionReturn(0); 105 } 106 } 107 PetscFunctionReturn(0); 108 } 109 110 #undef __FUNCT__ 111 #define __FUNCT__ 
"MatHYPRE_IJMatrixCopy" 112 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 113 { 114 PetscErrorCode ierr; 115 PetscInt i,rstart,rend,ncols,nr,nc; 116 const PetscScalar *values; 117 const PetscInt *cols; 118 PetscBool flg; 119 120 PetscFunctionBegin; 121 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 122 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 123 if (flg && nr == nc) { 124 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 125 PetscFunctionReturn(0); 126 } 127 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 128 if (flg) { 129 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 130 PetscFunctionReturn(0); 131 } 132 133 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 134 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 135 for (i=rstart; i<rend; i++) { 136 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 137 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values)); 138 ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 139 } 140 PetscFunctionReturn(0); 141 } 142 143 #undef __FUNCT__ 144 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ" 145 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij) 146 { 147 PetscErrorCode ierr; 148 Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data; 149 HYPRE_Int type; 150 hypre_ParCSRMatrix *par_matrix; 151 hypre_AuxParCSRMatrix *aux_matrix; 152 hypre_CSRMatrix *hdiag; 153 154 PetscFunctionBegin; 155 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 156 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 157 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 158 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 159 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 160 /* 161 this is the Hack part where we monkey directly 
with the hypre datastructures 162 */ 163 ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt)); 164 ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt)); 165 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 166 167 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 168 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 169 PetscFunctionReturn(0); 170 } 171 172 #undef __FUNCT__ 173 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ" 174 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 175 { 176 PetscErrorCode ierr; 177 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 178 Mat_SeqAIJ *pdiag,*poffd; 179 PetscInt i,*garray = pA->garray,*jj,cstart,*pjj; 180 HYPRE_Int type; 181 hypre_ParCSRMatrix *par_matrix; 182 hypre_AuxParCSRMatrix *aux_matrix; 183 hypre_CSRMatrix *hdiag,*hoffd; 184 185 PetscFunctionBegin; 186 pdiag = (Mat_SeqAIJ*) pA->A->data; 187 poffd = (Mat_SeqAIJ*) pA->B->data; 188 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 189 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 190 191 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 192 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 193 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 194 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 195 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 196 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 197 198 /* 199 this is the Hack part where we monkey directly with the hypre datastructures 200 */ 201 ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 202 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 203 jj = (PetscInt*)hdiag->j; 204 pjj = (PetscInt*)pdiag->j; 205 for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i]; 206 ierr = 
PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 207 ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 208 /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this 209 If we hacked a hypre a bit more we might be able to avoid this step */ 210 jj = (PetscInt*) hoffd->j; 211 pjj = (PetscInt*) poffd->j; 212 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 213 ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar)); 214 215 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 216 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 217 PetscFunctionReturn(0); 218 } 219 220 #undef __FUNCT__ 221 #define __FUNCT__ "MatConvert_HYPRE_IS" 222 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 223 { 224 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 225 Mat lA; 226 ISLocalToGlobalMapping rl2g,cl2g; 227 IS is; 228 hypre_ParCSRMatrix *hA; 229 hypre_CSRMatrix *hdiag,*hoffd; 230 MPI_Comm comm; 231 PetscScalar *hdd,*hod,*aa,*data; 232 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj; 233 PetscInt *ii,*jj,*iptr,*jptr; 234 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 235 HYPRE_Int type; 236 PetscErrorCode ierr; 237 238 PetscFunctionBegin; 239 comm = PetscObjectComm((PetscObject)A); 240 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 241 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 242 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 243 M = hypre_ParCSRMatrixGlobalNumRows(hA); 244 N = hypre_ParCSRMatrixGlobalNumCols(hA); 245 str = hypre_ParCSRMatrixFirstRowIndex(hA); 246 stc = hypre_ParCSRMatrixFirstColDiag(hA); 247 hdiag = hypre_ParCSRMatrixDiag(hA); 248 hoffd = hypre_ParCSRMatrixOffd(hA); 249 dr = hypre_CSRMatrixNumRows(hdiag); 250 dc = hypre_CSRMatrixNumCols(hdiag); 251 nnz = hypre_CSRMatrixNumNonzeros(hdiag); 252 hdi = hypre_CSRMatrixI(hdiag); 253 hdj = 
hypre_CSRMatrixJ(hdiag); 254 hdd = hypre_CSRMatrixData(hdiag); 255 oc = hypre_CSRMatrixNumCols(hoffd); 256 nnz += hypre_CSRMatrixNumNonzeros(hoffd); 257 hoi = hypre_CSRMatrixI(hoffd); 258 hoj = hypre_CSRMatrixJ(hoffd); 259 hod = hypre_CSRMatrixData(hoffd); 260 if (reuse != MAT_REUSE_MATRIX) { 261 PetscInt *aux; 262 263 /* generate l2g maps for rows and cols */ 264 ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr); 265 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 266 ierr = ISDestroy(&is);CHKERRQ(ierr); 267 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA); 268 ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr); 269 for (i=0; i<dc; i++) aux[i] = i+stc; 270 for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i]; 271 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 272 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 273 ierr = ISDestroy(&is);CHKERRQ(ierr); 274 /* create MATIS object */ 275 ierr = MatCreate(comm,B);CHKERRQ(ierr); 276 ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr); 277 ierr = MatSetType(*B,MATIS);CHKERRQ(ierr); 278 ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr); 279 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 280 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 281 282 /* allocate CSR for local matrix */ 283 ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr); 284 ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr); 285 ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr); 286 } else { 287 PetscInt nr; 288 PetscBool done; 289 ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr); 290 ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr); 291 if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr); 292 if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! 
reuse %D requested %D",iptr[nr],nnz); 293 ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr); 294 } 295 /* merge local matrices */ 296 ii = iptr; 297 jj = jptr; 298 aa = data; 299 *ii = *(hdi++) + *(hoi++); 300 for (jd=0,jo=0,cum=0; *ii<nnz; cum++) { 301 PetscScalar *aold = aa; 302 PetscInt *jold = jj,nc = jd+jo; 303 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; } 304 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } 305 *(++ii) = *(hdi++) + *(hoi++); 306 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr); 307 } 308 for (; cum<dr; cum++) *(++ii) = nnz; 309 if (reuse != MAT_REUSE_MATRIX) { 310 Mat_SeqAIJ* a; 311 312 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr); 313 ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr); 314 /* hack SeqAIJ */ 315 a = (Mat_SeqAIJ*)(lA->data); 316 a->free_a = PETSC_TRUE; 317 a->free_ij = PETSC_TRUE; 318 ierr = MatDestroy(&lA);CHKERRQ(ierr); 319 } 320 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 321 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 322 if (reuse == MAT_INPLACE_MATRIX) { 323 ierr = MatHeaderReplace(A,B);CHKERRQ(ierr); 324 } 325 PetscFunctionReturn(0); 326 } 327 328 #undef __FUNCT__ 329 #define __FUNCT__ "MatConvert_AIJ_HYPRE" 330 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B) 331 { 332 Mat_HYPRE *hB; 333 MPI_Comm comm = PetscObjectComm((PetscObject)A); 334 PetscErrorCode ierr; 335 336 PetscFunctionBegin; 337 if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX"); 338 if (reuse == MAT_REUSE_MATRIX) { 339 /* always destroy the old matrix and create a new memory; 340 hope this does not churn the memory too much. The problem 341 is I do not know if it is possible to put the matrix back to 342 its initial state so that we can directly copy the values 343 the second time through. 
    */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    ierr = MatSetUp(*B);CHKERRQ(ierr);
    hB = (Mat_HYPRE*)((*B)->data);
  }
  /* build the IJ matrix layout/preallocation from A, then copy the values */
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_AIJ"
/*
   Converts a MATHYPRE matrix A into a (Seq/MPI)AIJ matrix B, copying the
   hypre diagonal/off-diagonal CSR blocks and sorting the column indices of
   each row.  With MAT_REUSE_MATRIX, *B must already be a compatible
   MATSEQAIJ or MATMPIAIJ matrix whose internal arrays are overwritten.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    /* fresh arrays for the diagonal block; ownership passes to *B below */
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* NOTE(review): MatGetRowIJ is never matched by MatRestoreRowIJ here; the
       pointers are deliberately kept to overwrite *B's arrays in place — verify
       this is intended before changing */
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  }
  /* copy the diagonal block and sort each row's column indices (hypre rows
     may start with the diagonal entry, PETSc wants sorted columns) */
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    PetscInt *offdj,*coffd;

    if (reuse != MAT_REUSE_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    }
    ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate the compressed off-diagonal column indices to global numbering */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse != MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
                                            djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    }
  } else if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* b;
    ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to *B */
    b          = (Mat_SeqAIJ*)((*B)->data);
    b->free_a  = PETSC_TRUE;
    b->free_ij = PETSC_TRUE;
  }
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAP_AIJ_HYPRE"
/*
   Computes C = P^T * A * P where A is (Seq/MPI)AIJ and P is MATHYPRE, by
   wrapping A in a temporary hypre ParCSR matrix and calling BoomerAMG's RAP.
*/
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  Mat_HYPRE          *hP = (Mat_HYPRE*)P->data;
  hypre_ParCSRMatrix *tA,*Pparcsr,*ptapparcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
  HYPRE_Int          type,P_owns_col_starts;
  PetscBool          ismpiaij,isseqaij;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode     ierr;

PetscFunctionBegin; 499 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type)); 500 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 501 if (scall == MAT_REUSE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_REUSE_MATRIX"); 502 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 503 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 504 if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type); 505 if (ismpiaij) { 506 Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data); 507 508 diag = (Mat_SeqAIJ*)a->A->data; 509 offd = (Mat_SeqAIJ*)a->B->data; 510 garray = a->garray; 511 noffd = a->B->cmap->N; 512 dnnz = diag->nz; 513 onnz = offd->nz; 514 } else { 515 diag = (Mat_SeqAIJ*)A->data; 516 offd = NULL; 517 garray = NULL; 518 noffd = 0; 519 dnnz = diag->nz; 520 onnz = 0; 521 } 522 /* create a temporary ParCSR */ 523 if (HYPRE_AssumedPartitionCheck()) { 524 PetscMPIInt myid; 525 526 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 527 row_starts = A->rmap->range + myid; 528 col_starts = A->cmap->range + myid; 529 } else { 530 row_starts = A->rmap->range; 531 col_starts = A->cmap->range; 532 } 533 tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,row_starts,col_starts,noffd,dnnz,onnz); 534 hypre_ParCSRMatrixSetRowStartsOwner(tA,0); 535 hypre_ParCSRMatrixSetColStartsOwner(tA,0); 536 537 hdiag = hypre_ParCSRMatrixDiag(tA); 538 hypre_CSRMatrixI(hdiag) = diag->i; 539 hypre_CSRMatrixJ(hdiag) = diag->j; 540 hypre_CSRMatrixData(hdiag) = diag->a; 541 hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz; 542 hypre_CSRMatrixSetRownnz(hdiag); 543 hypre_CSRMatrixSetDataOwner(hdiag,0); 544 545 hoffd = hypre_ParCSRMatrixOffd(tA); 546 if (offd) { 547 hypre_CSRMatrixI(hoffd) = offd->i; 548 hypre_CSRMatrixJ(hoffd) = offd->j; 549 hypre_CSRMatrixData(hoffd) = offd->a; 550 hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz; 551 
hypre_CSRMatrixSetRownnz(hoffd); 552 hypre_CSRMatrixSetDataOwner(hoffd,0); 553 hypre_ParCSRMatrixSetNumNonzeros(tA); 554 hypre_ParCSRMatrixColMapOffd(tA) = garray; 555 } 556 557 /* call RAP from BoomerAMG */ 558 /* Warning: hypre_BoomerAMGBuildCoarseOperator steals the col_starts 559 from Pparcsr (even if it does not own them)! */ 560 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr)); 561 P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(Pparcsr); 562 ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr); 563 PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(Pparcsr,tA,Pparcsr,&ptapparcsr)); 564 ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr); 565 hypre_ParCSRMatrixSetRowStartsOwner(ptapparcsr,0); 566 hypre_ParCSRMatrixSetColStartsOwner(ptapparcsr,0); 567 if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(Pparcsr, 1); 568 569 /* create MatHYPRE */ 570 ierr = MatHYPRECreateFromParCSR(ptapparcsr,PETSC_OWN_POINTER,C);CHKERRQ(ierr); 571 572 /* set pointers to NULL before destroying tA */ 573 hypre_CSRMatrixI(hdiag) = NULL; 574 hypre_CSRMatrixJ(hdiag) = NULL; 575 hypre_CSRMatrixData(hdiag) = NULL; 576 hypre_CSRMatrixI(hoffd) = NULL; 577 hypre_CSRMatrixJ(hoffd) = NULL; 578 hypre_CSRMatrixData(hoffd) = NULL; 579 hypre_ParCSRMatrixColMapOffd(tA) = NULL; 580 hypre_ParCSRMatrixDestroy(tA); 581 PetscFunctionReturn(0); 582 } 583 584 #undef __FUNCT__ 585 #define __FUNCT__ "MatPtAP_HYPRE_HYPRE" 586 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C) 587 { 588 hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr; 589 Mat_HYPRE *hA = (Mat_HYPRE*)A->data, *hP = (Mat_HYPRE*)P->data; 590 HYPRE_Int type,P_owns_col_starts; 591 PetscErrorCode ierr; 592 593 PetscFunctionBegin; 594 if (scall == MAT_REUSE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported MAT_REUSE_MATRIX"); 595 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 596 if (type != 
HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 597 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type)); 598 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 599 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr)); 600 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr)); 601 602 /* call RAP from BoomerAMG */ 603 /* Warning: hypre_BoomerAMGBuildCoarseOperator steals the col_starts 604 from Pparcsr (even if it does not own them)! */ 605 P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(Pparcsr); 606 ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr); 607 PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr)); 608 ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr); 609 hypre_ParCSRMatrixSetRowStartsOwner(ptapparcsr,0); 610 hypre_ParCSRMatrixSetColStartsOwner(ptapparcsr,0); 611 if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(Pparcsr, 1); 612 613 /* create MatHYPRE */ 614 ierr = MatHYPRECreateFromParCSR(ptapparcsr,PETSC_OWN_POINTER,C);CHKERRQ(ierr); 615 PetscFunctionReturn(0); 616 } 617 618 #undef __FUNCT__ 619 #define __FUNCT__ "MatMultTranspose_HYPRE" 620 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y) 621 { 622 PetscErrorCode ierr; 623 624 PetscFunctionBegin; 625 ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr); 626 PetscFunctionReturn(0); 627 } 628 629 #undef __FUNCT__ 630 #define __FUNCT__ "MatMult_HYPRE" 631 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y) 632 { 633 PetscErrorCode ierr; 634 635 PetscFunctionBegin; 636 ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr); 637 PetscFunctionReturn(0); 638 } 639 640 #undef __FUNCT__ 641 #define __FUNCT__ "MatHYPRE_MultKernel_Private" 642 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec 
y, PetscBool trans)
{
  /* Shared kernel for MatMult/MatMultTranspose: temporarily swaps the PETSc
     vector arrays into the cached hypre work vectors (hA->x, hA->b), calls the
     hypre matvec, then swaps the original pointers back. */
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose: input lives in hA->b (row layout), output in hA->x (column layout) */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_HYPRE"
/* Frees the hypre work vectors, the IJ matrix (detaching the inner ParCSR
   first when we do not own it), the duplicated communicator, and composed
   methods. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if the ParCSR object is owned elsewhere, detach it so HYPRE does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUp_HYPRE"
/* Sets up the layouts and creates the cached hypre work vectors used by the
   mult kernel; idempotent (returns early if already set up). */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* array-less vectors: only the layout is needed to size the hypre vectors */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_HYPRE"
/* Finalizes assembly by assembling the underlying HYPRE IJ matrix. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRECreateFromParCSR"
/*
   Creates a MATHYPRE matrix A wrapping an existing hypre ParCSRMatrix.
   copymode PETSC_OWN_POINTER transfers ownership of parcsr to A;
   PETSC_USE_POINTER leaves ownership with the caller; PETSC_COPY_VALUES is
   not supported.
*/
PETSC_EXTERN PetscErrorCode MatHYPRECreateFromParCSR(void *vparcsr, PetscCopyMode copymode, Mat* A)
{
  Mat_HYPRE          *hA;
  hypre_ParCSRMatrix *parcsr;
  MPI_Comm           comm;
  PetscInt           rstart,rend,cstart,cend,M,N;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  if (copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  /* hypre ranges are inclusive, hence the +1 for local sizes */
  ierr = MatSetSizes(*A,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATHYPRE);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)((*A)->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object directly into the IJ matrix (no copy) */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));

  /* prevent from freeing the pointer */
  if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;

  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreate_HYPRE"
/* Constructor for the MATHYPRE type: installs the operations table and
   composes the conversion/PtAP methods. */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;

  B->data      = (void*)hB;
  B->rmap->bs  = 1;
  B->assembled = PETSC_FALSE;

  B->ops->mult          = MatMult_HYPRE;
  B->ops->multtranspose = MatMultTranspose_HYPRE;
  B->ops->setup         = MatSetUp_HYPRE;
  B->ops->destroy       = MatDestroy_HYPRE;
  B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
  B->ops->ptap          = MatPtAP_HYPRE_HYPRE;

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#if 0
/*
    Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

    This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
    which will corrupt the PETSc data structure if we did this. Need a work around to this problem.
*/
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixLink"
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif