/*
    Creates hypre ijmatrix from PETSc matrix
*/

/*MC
   MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
          based on the hypre IJ interface.

   Level: intermediate

.seealso: MatCreate()
M*/

#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>

/* forward declarations of static helpers defined later in this file */
static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
static PetscErrorCode hypre_array_destroy(void*);

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixPreallocate"
/* Preallocate the hypre IJ matrix ij from the per-row nonzero counts of the
   local diagonal block A_d and off-diagonal block A_o; either block may be NULL.
   Row counts are taken from the blocks' CSR row pointers via MatGetRowIJ. */
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscErrorCode ierr;
  PetscInt       i,n_d,n_o;
  const PetscInt *ia_d,*ia_o;
  PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
  PetscInt       *nnz_d=NULL,*nnz_o=NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
    if (done_d) {
      ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_d[i] = ia_d[i+1] - ia_d[i]; /* row length from CSR row pointer differences */
      }
    }
    ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
    if (done_o) {
      ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_o; i++) {
        nnz_o[i] = ia_o[i+1] - ia_o[i];
      }
    }
    ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
  }
  if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part: pass an all-zero off-diagonal count array */
      ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_o[i] = 0;
      }
    }
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_CreateFromMat"
/* Create and preallocate the hypre IJ matrix in hA with the same parallel layout
   as the PETSc matrix A (supports MPIAIJ, MPIBAIJ, SEQAIJ, SEQBAIJ). */
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre takes inclusive upper bounds, hence the -1 on rend/cend */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr =
MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 111 PetscFunctionReturn(0); 112 } 113 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 114 if (same) { 115 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 116 PetscFunctionReturn(0); 117 } 118 } 119 PetscFunctionReturn(0); 120 } 121 122 #undef __FUNCT__ 123 #define __FUNCT__ "MatHYPRE_IJMatrixCopy" 124 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 125 { 126 PetscErrorCode ierr; 127 PetscInt i,rstart,rend,ncols,nr,nc; 128 const PetscScalar *values; 129 const PetscInt *cols; 130 PetscBool flg; 131 132 PetscFunctionBegin; 133 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 134 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 135 if (flg && nr == nc) { 136 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 137 PetscFunctionReturn(0); 138 } 139 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 140 if (flg) { 141 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 142 PetscFunctionReturn(0); 143 } 144 145 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 146 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 147 for (i=rstart; i<rend; i++) { 148 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 149 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values)); 150 ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 151 } 152 PetscFunctionReturn(0); 153 } 154 155 #undef __FUNCT__ 156 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ" 157 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij) 158 { 159 PetscErrorCode ierr; 160 Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data; 161 HYPRE_Int type; 162 hypre_ParCSRMatrix *par_matrix; 163 hypre_AuxParCSRMatrix *aux_matrix; 164 hypre_CSRMatrix *hdiag; 165 166 PetscFunctionBegin; 167 
PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 168 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 169 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 170 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 171 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 172 /* 173 this is the Hack part where we monkey directly with the hypre datastructures 174 */ 175 ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt)); 176 ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt)); 177 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 178 179 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 180 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 181 PetscFunctionReturn(0); 182 } 183 184 #undef __FUNCT__ 185 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ" 186 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 187 { 188 PetscErrorCode ierr; 189 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 190 Mat_SeqAIJ *pdiag,*poffd; 191 PetscInt i,*garray = pA->garray,*jj,cstart,*pjj; 192 HYPRE_Int type; 193 hypre_ParCSRMatrix *par_matrix; 194 hypre_AuxParCSRMatrix *aux_matrix; 195 hypre_CSRMatrix *hdiag,*hoffd; 196 197 PetscFunctionBegin; 198 pdiag = (Mat_SeqAIJ*) pA->A->data; 199 poffd = (Mat_SeqAIJ*) pA->B->data; 200 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 201 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 202 203 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 204 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 205 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 206 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 207 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 208 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 209 210 /* 211 this is the Hack 
part where we monkey directly with the hypre datastructures 212 */ 213 ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 214 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 215 jj = (PetscInt*)hdiag->j; 216 pjj = (PetscInt*)pdiag->j; 217 for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i]; 218 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 219 ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 220 /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this 221 If we hacked a hypre a bit more we might be able to avoid this step */ 222 jj = (PetscInt*) hoffd->j; 223 pjj = (PetscInt*) poffd->j; 224 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 225 ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar)); 226 227 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 228 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 229 PetscFunctionReturn(0); 230 } 231 232 #undef __FUNCT__ 233 #define __FUNCT__ "MatConvert_HYPRE_IS" 234 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 235 { 236 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 237 Mat lA; 238 ISLocalToGlobalMapping rl2g,cl2g; 239 IS is; 240 hypre_ParCSRMatrix *hA; 241 hypre_CSRMatrix *hdiag,*hoffd; 242 MPI_Comm comm; 243 PetscScalar *hdd,*hod,*aa,*data; 244 HYPRE_Int *col_map_offd,*hdi,*hdj,*hoi,*hoj; 245 PetscInt *ii,*jj,*iptr,*jptr; 246 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 247 HYPRE_Int type; 248 PetscErrorCode ierr; 249 250 PetscFunctionBegin; 251 comm = PetscObjectComm((PetscObject)A); 252 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 253 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 254 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 255 M = 
hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz now counts diag + offd entries */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns [0,dc) are the owned diagonal columns, [dc,dc+oc) the ghost columns */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave each row's diag entries (local indices kept)
     with its offd entries (shifted by dc), then sort each row by column */
  ii  = iptr;
  jj  = jptr;
  aa  = data;
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad the row pointer for trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ so it takes ownership of (and later frees) the arrays we allocated */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvert_AIJ_HYPRE"
/* Convert an AIJ matrix into a MATHYPRE matrix by creating a hypre IJ matrix
   and copying the values; MAT_INPLACE_MATRIX is not supported. */
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)((*B)->data);
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_AIJ"
/* Convert a MATHYPRE matrix into (Seq/MPI)AIJ; supports MAT_INITIAL_MATRIX,
   MAT_REUSE_MATRIX (into an existing AIJ matrix) and MAT_INPLACE_MATRIX
   (stealing the hypre CSR arrays). */
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij)
SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the target arrays for the diagonal block:
     freshly allocated (INITIAL), borrowed from the existing AIJ (REUSE),
     or aliased to the hypre arrays themselves (INPLACE) */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  /* NOTE(review): in the INPLACE case these memcpys copy the hypre arrays onto
     themselves (src == dst); presumably harmless for PetscMemcpy — confirm */
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort each row by column index, permuting the values identically */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate offd column indices to global numbering via the hypre col map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ so it takes ownership of (and later frees) the arrays */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the stolen arrays from hypre so destroying A does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ so it takes ownership of (and later frees) the arrays */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void       *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach each hypre-allocated array to B in a container whose destroy
       callback calls hypre's deallocator */
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAIJGetParCSR_Private"
/* Wrap an AIJ matrix as a hypre ParCSR without copying values: the hypre matrix
   borrows the PETSc CSR arrays, so MatAIJRestoreParCSR_Private must be called
   to detach them before the wrapper is destroyed. */
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* with assumed partition, hypre only wants this rank's two range entries */
    ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
  /* the layout arrays are borrowed from the PETSc maps: hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
  hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
  hypre_CSRMatrixData(hdiag)        = diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
    hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
    hypre_CSRMatrixData(hoffd)        = offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAIJRestoreParCSR_Private"
/* Undo MatAIJGetParCSR_Private: detach the borrowed PETSc arrays, then destroy
   the temporary hypre wrapper. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  hypre_CSRMatrixI(hoffd)    = NULL;
  hypre_CSRMatrixJ(hoffd)    = NULL;
  hypre_CSRMatrixData(hoffd) = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts */
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_ParCSR_RAP"
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,
                                          hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  PetscErrorCode ierr;
  HYPRE_Int      P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  /* restore the ownership flags on P and R that the RAP call changed */
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_AIJ_AIJ_wHYPRE"
/* Numeric phase of PtAP for two AIJ matrices, computed through hypre's RAP */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAPSymbolic_AIJ_AIJ_wHYPRE"
/* Symbolic phase: only creates C and installs the hypre-backed numeric kernel;
   all real work happens in the numeric phase. */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_AIJ_HYPRE"
/* Numeric phase of PtAP with AIJ A and MATHYPRE P, through hypre's RAP */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* It looks like we don't need to have the diagonal entries
     ordered first in the rows of the diagonal part
     for boomerAMGBuildCoarseOperator to work */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define
__FUNCT__ "MatPtAP_AIJ_HYPRE"
/* PtAP driver for AIJ A and MATHYPRE P: on MAT_INITIAL_MATRIX creates C (type
   selectable via -matptap_hypre_outtype, default AIJ) and installs the numeric
   kernel, then runs the numeric phase. */
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_HYPRE_HYPRE"
/* Numeric phase of PtAP when both A and P are MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
/* PtAP driver for MATHYPRE A and P: creates the MATHYPRE result on
   MAT_INITIAL_MATRIX, then runs the numeric phase. */
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultNumeric_AIJ_AIJ_wHYPRE"
/* Numeric phase of C = A*B for AIJ matrices, computed via hypre_ParMatmul */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscStackPush("hypre_ParMatmul");
  hAB  = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultSymbolic_AIJ_AIJ_wHYPRE"
/* Symbolic phase: only creates C and installs the hypre-backed numeric kernel */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
/* y = A^T x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_HYPRE"
/* y = A x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_MultKernel_Private"
/* Shared kernel for MatMult/MatMultTranspose: temporarily swaps the PETSc vector
   data pointers into the matrix's work hypre vectors (hA->x, hA->b), calls the
   hypre matvec, then restores the original pointers. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* for the transpose, the roles of the two work vectors are swapped:
       hy (backed by hA->b) holds the input, hx (backed by hA->x) the output */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_HYPRE"
/* Destroy the MATHYPRE data: work vectors, the IJ matrix, the duplicated
   communicator, and the composed conversion/PtAP callbacks. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if the inner ParCSR is not owned, detach it so hypre does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr =
PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr); 917 ierr = PetscFree(A->data);CHKERRQ(ierr); 918 PetscFunctionReturn(0); 919 } 920 921 #undef __FUNCT__ 922 #define __FUNCT__ "MatSetUp_HYPRE" 923 static PetscErrorCode MatSetUp_HYPRE(Mat A) 924 { 925 PetscErrorCode ierr; 926 927 PetscFunctionBegin; 928 ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr); 929 PetscFunctionReturn(0); 930 } 931 932 #undef __FUNCT__ 933 #define __FUNCT__ "MatAssemblyEnd_HYPRE" 934 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode) 935 { 936 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 937 Vec x,b; 938 PetscErrorCode ierr; 939 940 PetscFunctionBegin; 941 if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE"); 942 PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij)); 943 if (hA->x) PetscFunctionReturn(0); 944 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 945 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 946 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr); 947 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr); 948 ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr); 949 ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr); 950 ierr = VecDestroy(&x);CHKERRQ(ierr); 951 ierr = VecDestroy(&b);CHKERRQ(ierr); 952 PetscFunctionReturn(0); 953 } 954 955 #define MATHYPRE_SCRATCH 2048 956 957 #undef __FUNCT__ 958 #define __FUNCT__ "MatSetValues_HYPRE" 959 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins) 960 { 961 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 962 PetscScalar *vals = (PetscScalar *)v; 963 PetscScalar sscr[MATHYPRE_SCRATCH]; 964 HYPRE_Int cscr[2][MATHYPRE_SCRATCH]; 965 HYPRE_Int i,nzc; 966 
PetscErrorCode ierr; 967 968 PetscFunctionBegin; 969 for (i=0,nzc=0;i<nc;i++) { 970 if (cols[i] >= 0) { 971 cscr[0][nzc ] = cols[i]; 972 cscr[1][nzc++] = i; 973 } 974 } 975 if (!nzc) PetscFunctionReturn(0); 976 977 if (ins == ADD_VALUES) { 978 for (i=0;i<nr;i++) { 979 if (rows[i] >= 0) { 980 PetscInt j; 981 for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]]; 982 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr)); 983 } 984 vals += nc; 985 } 986 } else { /* INSERT_VALUES */ 987 #if defined(PETSC_USE_DEBUG) 988 /* Insert values cannot be used to insert offproc entries */ 989 PetscInt rst,ren; 990 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 991 for (i=0;i<nr;i++) 992 if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead"); 993 #endif 994 for (i=0;i<nr;i++) { 995 if (rows[i] >= 0) { 996 PetscInt j; 997 for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]]; 998 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr)); 999 } 1000 vals += nc; 1001 } 1002 } 1003 PetscFunctionReturn(0); 1004 } 1005 1006 #undef __FUNCT__ 1007 #define __FUNCT__ "MatHYPRESetPreallocation_HYPRE" 1008 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1009 { 1010 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1011 HYPRE_Int *hdnnz,*honnz; 1012 PetscInt i,rs,re,cs,ce,bs; 1013 PetscMPIInt size; 1014 PetscErrorCode ierr; 1015 1016 PetscFunctionBegin; 1017 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1018 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1019 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1020 rs = A->rmap->rstart; 1021 re = A->rmap->rend; 1022 cs = A->cmap->rstart; 1023 ce = A->cmap->rend; 1024 if (!hA->ij) { 1025 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij)); 1026 
PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1027 } else { 1028 HYPRE_Int hrs,hre,hcs,hce; 1029 PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce)); 1030 if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re); 1031 if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce); 1032 } 1033 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1034 1035 if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs; 1036 if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs; 1037 1038 if (!dnnz) { 1039 ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr); 1040 for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz; 1041 } else { 1042 hdnnz = (HYPRE_Int*)dnnz; 1043 } 1044 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 1045 if (size > 1) { 1046 if (!onnz) { 1047 ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr); 1048 for (i=0;i<A->rmap->n;i++) honnz[i] = onz; 1049 } else { 1050 honnz = (HYPRE_Int*)onnz; 1051 } 1052 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz)); 1053 } else { 1054 honnz = NULL; 1055 PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz)); 1056 } 1057 if (!dnnz) { 1058 ierr = PetscFree(hdnnz);CHKERRQ(ierr); 1059 } 1060 if (!onnz && honnz) { 1061 ierr = PetscFree(honnz);CHKERRQ(ierr); 1062 } 1063 A->preallocated = PETSC_TRUE; 1064 1065 /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */ 1066 { 1067 hypre_AuxParCSRMatrix *aux_matrix; 1068 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1069 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 1070 } 1071 PetscFunctionReturn(0); 1072 } 1073 1074 /*@C 1075 MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format 1076 1077 Collective on Mat 1078 1079 
Input Parameters: 1080 + A - the matrix 1081 . dnz - number of nonzeros per row in DIAGONAL portion of local submatrix 1082 (same value is used for all local rows) 1083 . dnnz - array containing the number of nonzeros in the various rows of the 1084 DIAGONAL portion of the local submatrix (possibly different for each row) 1085 or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure. 1086 The size of this array is equal to the number of local rows, i.e 'm'. 1087 For matrices that will be factored, you must leave room for (and set) 1088 the diagonal entry even if it is zero. 1089 . onz - number of nonzeros per row in the OFF-DIAGONAL portion of local 1090 submatrix (same value is used for all local rows). 1091 - onnz - array containing the number of nonzeros in the various rows of the 1092 OFF-DIAGONAL portion of the local submatrix (possibly different for 1093 each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero 1094 structure. The size of this array is equal to the number 1095 of local rows, i.e 'm'. 1096 1097 Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored. 
1098 1099 Level: intermediate 1100 1101 .keywords: matrix, aij, compressed row, sparse, parallel 1102 1103 .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE 1104 @*/ 1105 #undef __FUNCT__ 1106 #define __FUNCT__ "MatHYPRESetPreallocation" 1107 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1108 { 1109 PetscErrorCode ierr; 1110 1111 PetscFunctionBegin; 1112 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1113 PetscValidType(A,1); 1114 ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr); 1115 PetscFunctionReturn(0); 1116 } 1117 1118 /* 1119 MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix 1120 1121 Collective 1122 1123 Input Parameters: 1124 + vparcsr - the pointer to the hypre_ParCSRMatrix 1125 . mtype - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported. 1126 - copymode - PETSc copying options 1127 1128 Output Parameter: 1129 . 
A - the matrix 1130 1131 Level: intermediate 1132 1133 .seealso: MatHYPRE, PetscCopyMode 1134 */ 1135 #undef __FUNCT__ 1136 #define __FUNCT__ "MatCreateFromParCSR" 1137 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A) 1138 { 1139 Mat T; 1140 Mat_HYPRE *hA; 1141 hypre_ParCSRMatrix *parcsr; 1142 MPI_Comm comm; 1143 PetscInt rstart,rend,cstart,cend,M,N; 1144 PetscBool isseqaij,ismpiaij,isaij,ishyp,isis; 1145 PetscErrorCode ierr; 1146 1147 PetscFunctionBegin; 1148 parcsr = (hypre_ParCSRMatrix *)vparcsr; 1149 comm = hypre_ParCSRMatrixComm(parcsr); 1150 ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 1151 ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 1152 ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr); 1153 ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr); 1154 ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr); 1155 isaij = (PetscBool)(isseqaij || ismpiaij || isaij); 1156 if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! 
Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE); 1157 if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES"); 1158 1159 /* access ParCSRMatrix */ 1160 rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr); 1161 rend = hypre_ParCSRMatrixLastRowIndex(parcsr); 1162 cstart = hypre_ParCSRMatrixFirstColDiag(parcsr); 1163 cend = hypre_ParCSRMatrixLastColDiag(parcsr); 1164 M = hypre_ParCSRMatrixGlobalNumRows(parcsr); 1165 N = hypre_ParCSRMatrixGlobalNumCols(parcsr); 1166 1167 /* fix for empty local rows/columns */ 1168 if (rend < rstart) rend = rstart; 1169 if (cend < cstart) cend = cstart; 1170 1171 /* create PETSc matrix with MatHYPRE */ 1172 ierr = MatCreate(comm,&T);CHKERRQ(ierr); 1173 ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr); 1174 ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr); 1175 hA = (Mat_HYPRE*)(T->data); 1176 1177 /* create HYPRE_IJMatrix */ 1178 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 1179 1180 /* set ParCSR object */ 1181 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1182 hypre_IJMatrixObject(hA->ij) = parcsr; 1183 T->preallocated = PETSC_TRUE; 1184 1185 /* set assembled flag */ 1186 hypre_IJMatrixAssembleFlag(hA->ij) = 1; 1187 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1188 if (ishyp) { 1189 PetscMPIInt myid = 0; 1190 1191 /* make sure we always have row_starts and col_starts available */ 1192 if (HYPRE_AssumedPartitionCheck()) { 1193 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 1194 } 1195 if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) { 1196 PetscLayout map; 1197 1198 ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr); 1199 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 1200 hypre_ParCSRMatrixColStarts(parcsr) = map->range + myid; 1201 } 1202 if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) { 1203 PetscLayout map; 1204 1205 ierr = 
MatGetLayouts(T,&map,NULL);CHKERRQ(ierr); 1206 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 1207 hypre_ParCSRMatrixRowStarts(parcsr) = map->range + myid; 1208 } 1209 /* prevent from freeing the pointer */ 1210 if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE; 1211 *A = T; 1212 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1213 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1214 } else if (isaij) { 1215 if (copymode != PETSC_OWN_POINTER) { 1216 /* prevent from freeing the pointer */ 1217 hA->inner_free = PETSC_FALSE; 1218 ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr); 1219 ierr = MatDestroy(&T);CHKERRQ(ierr); 1220 } else { /* AIJ return type with PETSC_OWN_POINTER */ 1221 ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr); 1222 *A = T; 1223 } 1224 } else if (isis) { 1225 ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr); 1226 if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE; 1227 ierr = MatDestroy(&T);CHKERRQ(ierr); 1228 } 1229 PetscFunctionReturn(0); 1230 } 1231 1232 #undef __FUNCT__ 1233 #define __FUNCT__ "MatHYPREGetParCSR_HYPRE" 1234 PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr) 1235 { 1236 Mat_HYPRE* hA = (Mat_HYPRE*)A->data; 1237 HYPRE_Int type; 1238 PetscErrorCode ierr; 1239 1240 PetscFunctionBegin; 1241 if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present"); 1242 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 1243 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR"); 1244 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr)); 1245 PetscFunctionReturn(0); 1246 } 1247 1248 /* 1249 MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix 1250 1251 Not collective 1252 1253 Input Parameters: 1254 + A - the MATHYPRE object 1255 1256 Output Parameter: 1257 
. parcsr - the pointer to the hypre_ParCSRMatrix 1258 1259 Level: intermediate 1260 1261 .seealso: MatHYPRE, PetscCopyMode 1262 */ 1263 #undef __FUNCT__ 1264 #define __FUNCT__ "MatHYPREGetParCSR" 1265 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr) 1266 { 1267 PetscErrorCode ierr; 1268 1269 PetscFunctionBegin; 1270 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1271 PetscValidType(A,1); 1272 ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr); 1273 PetscFunctionReturn(0); 1274 } 1275 1276 #undef __FUNCT__ 1277 #define __FUNCT__ "MatCreate_HYPRE" 1278 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B) 1279 { 1280 Mat_HYPRE *hB; 1281 PetscErrorCode ierr; 1282 1283 PetscFunctionBegin; 1284 ierr = PetscNewLog(B,&hB);CHKERRQ(ierr); 1285 hB->inner_free = PETSC_TRUE; 1286 1287 B->data = (void*)hB; 1288 B->rmap->bs = 1; 1289 B->assembled = PETSC_FALSE; 1290 1291 B->ops->mult = MatMult_HYPRE; 1292 B->ops->multtranspose = MatMultTranspose_HYPRE; 1293 B->ops->setup = MatSetUp_HYPRE; 1294 B->ops->destroy = MatDestroy_HYPRE; 1295 B->ops->assemblyend = MatAssemblyEnd_HYPRE; 1296 B->ops->ptap = MatPtAP_HYPRE_HYPRE; 1297 B->ops->setvalues = MatSetValues_HYPRE; 1298 1299 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr); 1300 ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr); 1301 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr); 1302 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr); 1303 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 1304 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 1305 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr); 1306 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr); 1307 PetscFunctionReturn(0); 1308 } 1309 1310 #undef __FUNCT__ 1311 #define __FUNCT__ "hypre_array_destroy" 1312 static PetscErrorCode hypre_array_destroy(void *ptr) 1313 { 1314 PetscFunctionBegin; 1315 hypre_TFree(ptr); 1316 PetscFunctionReturn(0); 1317 } 1318 1319 #if 0 1320 /* 1321 Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format 1322 1323 This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first 1324 which will corrupt the PETSc data structure if we did this. Need a work around to this problem. 1325 */ 1326 #include <_hypre_IJ_mv.h> 1327 #include <HYPRE_IJ_mv.h> 1328 1329 #undef __FUNCT__ 1330 #define __FUNCT__ "MatHYPRE_IJMatrixLink" 1331 PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij) 1332 { 1333 PetscErrorCode ierr; 1334 PetscInt rstart,rend,cstart,cend; 1335 PetscBool flg; 1336 hypre_AuxParCSRMatrix *aux_matrix; 1337 1338 PetscFunctionBegin; 1339 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1340 PetscValidType(A,1); 1341 PetscValidPointer(ij,2); 1342 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 1343 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices"); 1344 ierr = MatSetUp(A);CHKERRQ(ierr); 1345 1346 ierr = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr); 1347 rstart = A->rmap->rstart; 1348 rend = A->rmap->rend; 1349 cstart = A->cmap->rstart; 1350 cend = A->cmap->rend; 1351 PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij)); 1352 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR)); 1353 1354 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij)); 1355 PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij)); 1356 1357 
hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 1358 1359 /* this is the Hack part where we monkey directly with the hypre datastructures */ 1360 1361 PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij)); 1362 ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr); 1363 PetscFunctionReturn(0); 1364 } 1365 #endif 1366