/*
    Creates a hypre IJMatrix from a PETSc matrix
*/

/*MC
   MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
          based on the hypre IJ interface.

   Level: intermediate

.seealso: MatCreate()
M*/

#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>

static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
static PetscErrorCode hypre_array_destroy(void*);

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixPreallocate"
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscErrorCode ierr;
  PetscInt       i,n_d,n_o;
  const PetscInt *ia_d,*ia_o;
  PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
  PetscInt       *nnz_d=NULL,*nnz_o=NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
    if (done_d) {
      ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_d[i] = ia_d[i+1] - ia_d[i];
      }
    }
    ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
    if (done_o) {
      ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_o; i++) {
        nnz_o[i] = ia_o[i+1] - ia_o[i];
      }
    }
    ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
  }
  if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part */
      ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_o[i] = 0;
      }
    }
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
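
/*
   Typical entry point from application code, a minimal sketch (not compiled
   here): convert an assembled AIJ matrix to MATHYPRE via the generic
   MatConvert() interface, which dispatches to the routines in this file.

     Mat A,B;       // A is an assembled MATAIJ matrix
     PetscErrorCode ierr;
     ierr = MatConvert(A,MATHYPRE,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
     // ... use B wherever a hypre-backed matrix is needed ...
     ierr = MatDestroy(&B);CHKERRQ(ierr);
*/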

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_CreateFromMat"
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixCopy"
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  if (flg && nr == nc) {
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ"
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
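
/*
   The fast copy above works because Mat_SeqAIJ and hypre_CSRMatrix use the same
   three-array CSR layout. For example, the 2x3 matrix

     [1 0 2]
     [0 3 0]

   is stored in both as

     i    = {0,2,3}   row pointers
     j    = {0,2,1}   column indices
     data = {1,2,3}   values

   so the arrays can be copied verbatim; no index translation is needed in the
   purely sequential case.
*/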

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ"
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  jj  = (PetscInt*)hdiag->j;
  pjj = (PetscInt*)pdiag->j;
  for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked hypre a bit more we might be able to avoid this step */
  jj  = (PetscInt*) hoffd->j;
  pjj = (PetscInt*) poffd->j;
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
  ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
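
/*
   Example of the index translation above: on a process with cstart = 4 and
   garray = {1,9}, a diagonal-block column index 2 becomes global column
   4 + 2 = 6, while an off-diagonal-block column index 1 becomes global column
   garray[1] = 9, which is the global numbering hypre expects in both blocks.
*/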

#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_IS"
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE              *mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices */
  ii  = iptr;
  jj  = jptr;
  aa  = data;
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr       = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
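
/*
   Usage sketch for the conversion above (assuming A is a MATHYPRE matrix):

     Mat B;
     ierr = MatConvert(A,MATIS,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
     // later conversions can reuse B's local matrix and mappings:
     ierr = MatConvert(A,MATIS,MAT_REUSE_MATRIX,&B);CHKERRQ(ierr);
*/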

#undef __FUNCT__
#define __FUNCT__ "MatConvert_AIJ_HYPRE"
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and allocate new memory; hopefully
       this does not churn the memory too much. The problem is that we do
       not know whether the matrix can be put back into its initial state,
       which would let us directly copy the values the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)((*B)->data);
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
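
/*
   Sketch of the intended reuse pattern (nsteps is a hypothetical loop count):

     Mat      B = NULL;
     PetscInt step;
     ierr = MatConvert(A,MATHYPRE,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
     for (step=0; step<nsteps; step++) {
       // ... update the values of A ...
       ierr = MatConvert(A,MATHYPRE,MAT_REUSE_MATRIX,&B);CHKERRQ(ierr); // rebuilds the inner IJMatrix, see the note above
     }
     ierr = MatDestroy(&B);CHKERRQ(ierr);
*/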

#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_AIJ"
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void       *ptrs[6]  = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAIJGetParCSR_Private"
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
  hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
  hypre_CSRMatrixData(hdiag)        = diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
    hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
    hypre_CSRMatrixData(hoffd)        = offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}
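
/*
   Intended internal usage of MatAIJGetParCSR_Private together with the
   matching restore routine below (see e.g. MatPtAPNumeric_AIJ_AIJ_wHYPRE):
   wrap the AIJ arrays as a ParCSR, call a hypre kernel, then unwrap, without
   ever copying the matrix data.

     hypre_ParCSRMatrix *h;
     ierr = MatAIJGetParCSR_Private(A,&h);CHKERRQ(ierr);
     // ... call a hypre kernel on h ...
     ierr = MatAIJRestoreParCSR_Private(A,&h);CHKERRQ(ierr);
*/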

#undef __FUNCT__
#define __FUNCT__ "MatAIJRestoreParCSR_Private"
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts */
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_ParCSR_RAP"
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,
                                          hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  PetscErrorCode ierr;
  HYPRE_Int      P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_AIJ_AIJ_wHYPRE"
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAPSymbolic_AIJ_AIJ_wHYPRE"
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}
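
/*
   These routines compute C = P^T A P for AIJ matrices through hypre's RAP
   kernel. From user code they are reached through the generic interface, e.g.

     ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);CHKERRQ(ierr);

   when the hypre implementation has been selected (for MPIAIJ typically via a
   runtime option such as -matptap_via hypre, assuming PETSc was configured
   with hypre support).
*/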

#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_AIJ_HYPRE"
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* It looks like we don't need to have the diagonal entries
     ordered first in the rows of the diagonal part
     for hypre_BoomerAMGBuildCoarseOperator to work */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAP_AIJ_HYPRE"
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
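
/*
   The type of the product C can be chosen at runtime through the option
   registered above, e.g.

     -matptap_hypre_outtype hypre

   keeps C in MATHYPRE format instead of the default MATAIJ.
*/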

#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_HYPRE_HYPRE"
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultNumeric_AIJ_AIJ_wHYPRE"
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscStackPush("hypre_ParMatmul");
  hAB  = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultSymbolic_AIJ_AIJ_wHYPRE"
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE"
static PetscErrorCode MatMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE"
PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  (*D)->ops->matmatmultnumeric = MatMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}
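
/*
   The MatMatMult/MatMatMatMult kernels above follow the same pattern as PtAP:
   wrap the AIJ operands as ParCSR, call hypre_ParMatmul (or the RAP kernel for
   the triple product), and merge the result into the output matrix. They are
   reached through MatMatMult()/MatMatMatMult() when the hypre algorithm is
   selected (e.g. with a runtime option such as -matmatmult_via hypre, assuming
   it is available in the build).
*/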

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_HYPRE"
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_MultKernel_Private"
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
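
/*
   The kernel above avoids copying vector data:
   VecHYPRE_ParVectorReplacePointer(v,p,save) stashes the current hypre data
   pointer of v in 'save' and installs 'p' in its place, so the cached hypre
   vectors hA->x and hA->b temporarily alias the PETSc arrays; the second pair
   of calls restores the original pointers. For the transpose product the
   roles of the input and output arrays are simply exchanged.
*/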

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_HYPRE"
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUp_HYPRE"
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_HYPRE"
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#define MATHYPRE_SCRATCH 2048

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_HYPRE"
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA   = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  PetscScalar    sscr[MATHYPRE_SCRATCH];
  HYPRE_Int      cscr[2][MATHYPRE_SCRATCH];
  HYPRE_Int      i,nzc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* guard the fixed-size scratch buffers */
  if (nc > MATHYPRE_SCRATCH) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of columns %D exceeds MATHYPRE_SCRATCH = %d",nc,MATHYPRE_SCRATCH);
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) PetscFunctionReturn(0);

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
#if defined(PETSC_USE_DEBUG)
    /* INSERT_VALUES cannot be used to insert off-process entries */
    PetscInt rst,ren;
    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++)
      if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-process entries with MATHYPRE; use ADD_VALUES instead");
#endif
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
      }
      vals += nc;
    }
  }
  PetscFunctionReturn(0);
}
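
/*
   Usage sketch for MatSetValues with MATHYPRE: rows owned by other processes
   can only be updated with ADD_VALUES; INSERT_VALUES is restricted to local rows.

     PetscInt    row = 0,cols[2] = {0,1};
     PetscScalar vals[2] = {1.0,-1.0};
     ierr = MatSetValues(A,1,&row,2,cols,vals,ADD_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/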

#undef __FUNCT__
#define __FUNCT__ "MatHYPRESetPreallocation_HYPRE"
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    HYPRE_Int hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re-rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce-cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));

  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1) {
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else {
      honnz = (HYPRE_Int*)onnz;
    }
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
  } else {
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }
  A->preallocated = PETSC_TRUE;

  /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
  {
    hypre_AuxParCSRMatrix *aux_matrix;
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  }
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A    - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran) if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e. 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran) if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e. 'm'.

   Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
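
   Example usage (a minimal sketch; the per-row counts are hypothetical):
.vb
   Mat      A;
   PetscInt dnnz[3] = {1,2,1};
   PetscInt onnz[3] = {0,1,1};
   MatCreate(PETSC_COMM_WORLD,&A);
   MatSetSizes(A,3,3,PETSC_DETERMINE,PETSC_DETERMINE);
   MatSetType(A,MATHYPRE);
   MatHYPRESetPreallocation(A,0,dnnz,0,onnz);
.ve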

   Level: intermediate

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
#undef __FUNCT__
#define __FUNCT__ "MatHYPRESetPreallocation"
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  vparcsr  - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.  A  - the matrix

   Level: intermediate

.seealso: MATHYPRE, PetscCopyMode
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateFromParCSR"
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                T;
  Mat_HYPRE          *hA;
  hypre_ParCSRMatrix *parcsr;
  MPI_Comm           comm;
  PetscInt           rstart,rend,cstart,cend,M,N;
  PetscBool          isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = map->range + myid;
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = map->range + myid;
    }
    /* prevent the pointer from being freed */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent the pointer from being freed */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
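
/*
   Usage sketch (assuming parcsr is an assembled hypre_ParCSRMatrix obtained
   elsewhere): wrap it as a PETSc matrix without copying, leaving ownership of
   the hypre object with the caller.

     Mat A;
     ierr = MatCreateFromParCSR(parcsr,MATHYPRE,PETSC_USE_POINTER,&A);CHKERRQ(ierr);
     // ... use A ...
     ierr = MatDestroy(&A);CHKERRQ(ierr); // parcsr itself is not freed
*/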

#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR_HYPRE"
PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      type;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameter:
.  A - the MATHYPRE object

   Output Parameter:
.  parcsr - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MATHYPRE, PetscCopyMode
*/
#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR"
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreate_HYPRE"
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;

  B->data      = (void*)hB;
  B->rmap->bs  = 1;
  B->assembled = PETSC_FALSE;

  B->ops->mult          = MatMult_HYPRE;
  B->ops->multtranspose = MatMultTranspose_HYPRE;
  B->ops->setup         = MatSetUp_HYPRE;
  B->ops->destroy       = MatDestroy_HYPRE;
  B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
  B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
  B->ops->setvalues     = MatSetValues_HYPRE;

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
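
/*
   Putting it together, a minimal sketch of assembling a MATHYPRE matrix
   directly (m and n are hypothetical local sizes):

     Mat A;
     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
     ierr = MatSetType(A,MATHYPRE);CHKERRQ(ierr);
     ierr = MatHYPRESetPreallocation(A,5,NULL,5,NULL);CHKERRQ(ierr);
     // ... MatSetValues(A,...), ADD_VALUES for any off-process rows ...
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/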

#undef __FUNCT__
#define __FUNCT__ "hypre_array_destroy"
static PetscErrorCode hypre_array_destroy(void *ptr)
{
  PetscFunctionBegin;
  hypre_TFree(ptr);
  PetscFunctionReturn(0);
}

#if 0
/*
   Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

   This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
   in each row, which would corrupt the PETSc data structure if we did this. We need a workaround
   for this problem.
*/
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixLink"
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif