/*
   Creates hypre ijmatrix from PETSc matrix
*/

#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>
#include <_hypre_sstruct_ls.h>

PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);

static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
static PetscErrorCode hypre_array_destroy(void*);
PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);

/*
   Preallocates the HYPRE IJ matrix ij from the row lengths of the (sequential)
   diagonal block A_d and off-diagonal block A_o. Either block may be NULL.
   NOTE(review): nnz_d/nnz_o (PetscInt*) are passed to hypre as HYPRE_Int* via a
   cast — this assumes PetscInt and HYPRE_Int have the same size; confirm against
   the build configuration.
*/
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscErrorCode ierr;
  PetscInt       i,n_d,n_o;
  const PetscInt *ia_d,*ia_o;
  PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
  PetscInt       *nnz_d=NULL,*nnz_o=NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
    if (done_d) {
      ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
      /* row length = difference of consecutive CSR row pointers */
      for (i=0; i<n_d; i++) {
        nnz_d[i] = ia_d[i+1] - ia_d[i];
      }
    }
    ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
    if (done_o) {
      ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_o; i++) {
        nnz_o[i] = ia_o[i+1] - ia_o[i];
      }
    }
    ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
  }
  if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part: off-diagonal row lengths are all zero */
      ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_o[i] = 0;
      }
    }
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Creates the HYPRE IJ matrix hA->ij matching the layout of A and, when A is of a
   recognized (B)AIJ type, preallocates it from A's diagonal/off-diagonal blocks.
*/
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre takes inclusive upper bounds, hence the -1 */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = 
PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  /* unrecognized type: no preallocation performed */
  PetscFunctionReturn(0);
}

/*
   Copies the values of A into the (already created) HYPRE IJ matrix ij.
   Uses the fast memcpy-based paths for square MPIAIJ and for SeqAIJ; otherwise
   falls back to row-by-row HYPRE_IJMatrixSetValues.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  /* the MPIAIJ fast path is only used for square matrices (it shifts diag
     column indices by the row ownership start) */
  if (flg && nr == nc) {
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* generic path: insert one locally-owned row at a time */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    if (ncols) {
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
    }
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Fast copy of a SeqAIJ matrix into ij: memcpy the CSR arrays directly into the
   hypre diag CSR matrix, bypassing HYPRE_IJMatrixSetValues.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 160 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 161 /* 162 this is the Hack part where we monkey directly with the hypre datastructures 163 */ 164 ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr); 165 ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr); 166 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr); 167 168 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 169 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 170 PetscFunctionReturn(0); 171 } 172 173 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 174 { 175 PetscErrorCode ierr; 176 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 177 Mat_SeqAIJ *pdiag,*poffd; 178 PetscInt i,*garray = pA->garray,*jj,cstart,*pjj; 179 HYPRE_Int type; 180 hypre_ParCSRMatrix *par_matrix; 181 hypre_AuxParCSRMatrix *aux_matrix; 182 hypre_CSRMatrix *hdiag,*hoffd; 183 184 PetscFunctionBegin; 185 pdiag = (Mat_SeqAIJ*) pA->A->data; 186 poffd = (Mat_SeqAIJ*) pA->B->data; 187 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 188 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 189 190 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 191 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 192 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 193 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 194 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 195 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 196 197 /* 198 this is the Hack part where we monkey directly with the hypre datastructures 199 */ 200 ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 201 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 202 jj = 
(PetscInt*)hdiag->j; 203 pjj = (PetscInt*)pdiag->j; 204 for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i]; 205 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 206 ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 207 /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this 208 If we hacked a hypre a bit more we might be able to avoid this step */ 209 jj = (PetscInt*) hoffd->j; 210 pjj = (PetscInt*) poffd->j; 211 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 212 ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar)); 213 214 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 215 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 216 PetscFunctionReturn(0); 217 } 218 219 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 220 { 221 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 222 Mat lA; 223 ISLocalToGlobalMapping rl2g,cl2g; 224 IS is; 225 hypre_ParCSRMatrix *hA; 226 hypre_CSRMatrix *hdiag,*hoffd; 227 MPI_Comm comm; 228 PetscScalar *hdd,*hod,*aa,*data; 229 HYPRE_Int *col_map_offd,*hdi,*hdj,*hoi,*hoj; 230 PetscInt *ii,*jj,*iptr,*jptr; 231 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 232 HYPRE_Int type; 233 PetscErrorCode ierr; 234 235 PetscFunctionBegin; 236 comm = PetscObjectComm((PetscObject)A); 237 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 238 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 239 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 240 M = hypre_ParCSRMatrixGlobalNumRows(hA); 241 N = hypre_ParCSRMatrixGlobalNumCols(hA); 242 str = hypre_ParCSRMatrixFirstRowIndex(hA); 243 stc = hypre_ParCSRMatrixFirstColDiag(hA); 244 hdiag = hypre_ParCSRMatrixDiag(hA); 245 hoffd = hypre_ParCSRMatrixOffd(hA); 246 dr = hypre_CSRMatrixNumRows(hdiag); 247 dc = hypre_CSRMatrixNumCols(hdiag); 248 nnz = 
hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local columns: diagonal columns first, then the off-process columns from
       hypre's col_map_offd */
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write straight into the existing local matrix's CSR arrays */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave diag and offd rows (offd columns are
     shifted by dc to follow the diag columns), sorting each row's indices */
  ii  = iptr;
  jj  = jptr;
  aa  = data;
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr    = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad row pointers of any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the arrays we allocated above */
    a           = (Mat_SeqAIJ*)(lA->data);
    a->free_a   = PETSC_TRUE;
    a->free_ij  = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Converts an (B)AIJ matrix A into a MATHYPRE matrix B.
   MAT_INPLACE_MATRIX is not supported.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much.
The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)((*B)->data);
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Converts a MATHYPRE matrix A into a (Seq/MPI)AIJ matrix B, copying (or, for
   MAT_INPLACE_MATRIX, stealing) the hypre CSR arrays and restoring global
   column numbering for the off-diagonal part.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* diag part: obtain destination arrays (fresh, reused, or hypre's own) */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX: alias hypre's own arrays (the memcpy below is then a self-copy) */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* PETSc AIJ expects sorted column indices within each row */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    /* offd part, parallel case only */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* map hypre's compressed offd column indices back to global numbering */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer array ownership to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the arrays from hypre so destroying the IJ matrix won't free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer array ownership to the new matrix */
      b = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if
(reuse == MAT_INPLACE_MATRIX) { 493 Mat T; 494 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr); 495 hypre_CSRMatrixI(hdiag) = NULL; 496 hypre_CSRMatrixJ(hdiag) = NULL; 497 hypre_CSRMatrixData(hdiag) = NULL; 498 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 499 } 500 } 501 502 /* we have to use hypre_Tfree to free the arrays */ 503 if (reuse == MAT_INPLACE_MATRIX) { 504 void *ptrs[6] = {dii,djj,da,oii,ojj,oa}; 505 const char *names[6] = {"_hypre_csr_dii", 506 "_hypre_csr_djj", 507 "_hypre_csr_da", 508 "_hypre_csr_oii", 509 "_hypre_csr_ojj", 510 "_hypre_csr_oa"}; 511 for (i=0; i<6; i++) { 512 PetscContainer c; 513 514 ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr); 515 ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr); 516 ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr); 517 ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr); 518 ierr = PetscContainerDestroy(&c);CHKERRQ(ierr); 519 } 520 } 521 PetscFunctionReturn(0); 522 } 523 524 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 525 { 526 hypre_ParCSRMatrix *tA; 527 hypre_CSRMatrix *hdiag,*hoffd; 528 Mat_SeqAIJ *diag,*offd; 529 PetscInt *garray,noffd,dnnz,onnz,*row_starts,*col_starts; 530 MPI_Comm comm = PetscObjectComm((PetscObject)A); 531 PetscBool ismpiaij,isseqaij; 532 PetscErrorCode ierr; 533 534 PetscFunctionBegin; 535 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 536 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 537 if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type); 538 if (ismpiaij) { 539 Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data); 540 541 diag = (Mat_SeqAIJ*)a->A->data; 542 offd = (Mat_SeqAIJ*)a->B->data; 543 garray = a->garray; 544 noffd = a->B->cmap->N; 545 dnnz = diag->nz; 546 onnz = offd->nz; 547 } else { 548 diag = (Mat_SeqAIJ*)A->data; 549 offd = NULL; 550 garray = 
NULL; 551 noffd = 0; 552 dnnz = diag->nz; 553 onnz = 0; 554 } 555 556 /* create a temporary ParCSR */ 557 if (HYPRE_AssumedPartitionCheck()) { 558 PetscMPIInt myid; 559 560 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 561 row_starts = A->rmap->range + myid; 562 col_starts = A->cmap->range + myid; 563 } else { 564 row_starts = A->rmap->range; 565 col_starts = A->cmap->range; 566 } 567 tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz); 568 hypre_ParCSRMatrixSetRowStartsOwner(tA,0); 569 hypre_ParCSRMatrixSetColStartsOwner(tA,0); 570 571 /* set diagonal part */ 572 hdiag = hypre_ParCSRMatrixDiag(tA); 573 hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i; 574 hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j; 575 hypre_CSRMatrixData(hdiag) = diag->a; 576 hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz; 577 hypre_CSRMatrixSetRownnz(hdiag); 578 hypre_CSRMatrixSetDataOwner(hdiag,0); 579 580 /* set offdiagonal part */ 581 hoffd = hypre_ParCSRMatrixOffd(tA); 582 if (offd) { 583 hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i; 584 hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j; 585 hypre_CSRMatrixData(hoffd) = offd->a; 586 hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz; 587 hypre_CSRMatrixSetRownnz(hoffd); 588 hypre_CSRMatrixSetDataOwner(hoffd,0); 589 hypre_ParCSRMatrixSetNumNonzeros(tA); 590 hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray; 591 } 592 *hA = tA; 593 PetscFunctionReturn(0); 594 } 595 596 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 597 { 598 hypre_CSRMatrix *hdiag,*hoffd; 599 600 PetscFunctionBegin; 601 hdiag = hypre_ParCSRMatrixDiag(*hA); 602 hoffd = hypre_ParCSRMatrixOffd(*hA); 603 /* set pointers to NULL before destroying tA */ 604 hypre_CSRMatrixI(hdiag) = NULL; 605 hypre_CSRMatrixJ(hdiag) = NULL; 606 hypre_CSRMatrixData(hdiag) = NULL; 607 hypre_CSRMatrixI(hoffd) = NULL; 608 hypre_CSRMatrixJ(hoffd) = NULL; 609 hypre_CSRMatrixData(hoffd) = NULL; 610 
hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember ownership flags so we can restore them after hypre clobbers them */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

/* Numeric phase of C = P^T A P for AIJ operands, computed through hypre's RAP. */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase: only creates C and installs the hypre-backed numeric routine;
   no symbolic work is done (hypre computes the pattern during the numeric phase). */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

/* Numeric phase of C = P^T A P with AIJ A and MATHYPRE P. */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for AIJ A with MATHYPRE P; the output type is selectable via
   -matptap_hypre_outtype (default MATAIJ). */
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = 
PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Numeric phase of C = P^T A P when both A and P are MATHYPRE. */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver when both operands are MATHYPRE; the result is MATHYPRE. */
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* Numeric phase of C = A*B for AIJ operands, computed through hypre_ParMatmul. */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = 
MatHeaderMerge(C,&D);CHKERRQ(ierr); 796 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 797 ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr); 798 PetscFunctionReturn(0); 799 } 800 801 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C) 802 { 803 PetscErrorCode ierr; 804 805 PetscFunctionBegin; 806 ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr); 807 ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr); 808 (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE; 809 PetscFunctionReturn(0); 810 } 811 812 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C) 813 { 814 Mat D; 815 hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL; 816 Mat_HYPRE *hA,*hB; 817 PetscBool ishypre; 818 HYPRE_Int type; 819 PetscErrorCode ierr; 820 821 PetscFunctionBegin; 822 ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr); 823 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE); 824 ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr); 825 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE); 826 hA = (Mat_HYPRE*)A->data; 827 hB = (Mat_HYPRE*)B->data; 828 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 829 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 830 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type)); 831 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 832 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr)); 833 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr)); 834 ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr); 835 ierr = 
MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* HeaderReplace wiped the ops table; reinstall the numeric kernel for reuse */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}

/* MatMult driver for two MATHYPRE operands: symbolic phase only creates the
   result shell and installs the numeric kernel */
static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Numeric phase of D = A^T*B*C for AIJ operands, computed as an RAP in hypre */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase for A^T*B*C via hypre: only creates the AIJ result shell */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A^T x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Shared matvec kernel: temporarily points the cached hypre work vectors
   (hA->x, hA->b) at the PETSc vector arrays, calls the hypre matvec, then
   restores the original pointers. The ReplacePointer macro pairs must stay
   exactly nested as written */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose: input lives in the row-space vector, output in the column-space one */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr =
VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Destroys the MATHYPRE private data: hypre vectors/matrix, duplicated
   communicator, stash, scratch array and composed methods */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if we do not own the inner ParCSR, detach it so hypre does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Default setup: preallocate with default row sizes */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Final assembly: drains the off-process stash into hypre, assembles the
   IJMatrix, recreates the auxiliary translator (destroyed by assembly), and
   lazily creates the work vectors used by the matvec kernel */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscScalar    *val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* work vectors already created on a previous assembly */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* array-less vectors: only the layouts are needed to size the hypre vectors */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Hands out the (lazily grown) internal scratch buffer of at least 'size'
   bytes; only one outstanding user at a time (guarded by hA->available) */
static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

  if (hA->size >= size) *array = hA->array;
  else {
    /* grow: free the old buffer and allocate the requested byte count */
    ierr = PetscFree(hA->array);CHKERRQ(ierr);
    hA->size = size;
    ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
    *array = hA->array;
  }

  hA->available = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* Releases the scratch buffer handed out by MatGetArray_HYPRE */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  *array        = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* MatSetValues for MATHYPRE: compacts out negative column indices, forwards
   local rows to the hypre IJ interface and stashes off-process rows.
   NOTE(review): the (HYPRE_Int*) casts of PetscInt arrays assume the two
   integer types have the same size -- confirm against the build configuration */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  PetscScalar    *sscr;
  PetscInt       *cscr[2];
  PetscInt       i,nzc;
  void           *array = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scratch layout: [compacted cols | original positions | compacted values] */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr =
MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        /* gather the values matching the compacted columns */
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Implementation of MatHYPRESetPreallocation: creates (or validates) the
   HYPRE_IJMatrix and communicates the per-row diagonal/off-diagonal sizes */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs = A->rmap->rstart;
  re = A->rmap->rend;
  cs = A->cmap->rstart;
  ce = A->cmap->rend;
  if (!hA->ij) {
    /* hypre uses inclusive upper bounds, hence the -1 */
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    HYPRE_Int hrs,hre,hcs,hce;
PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce)); 1150 if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re); 1151 if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce); 1152 } 1153 if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs; 1154 if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs; 1155 1156 if (!dnnz) { 1157 ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr); 1158 for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz; 1159 } else { 1160 hdnnz = (HYPRE_Int*)dnnz; 1161 } 1162 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 1163 if (size > 1) { 1164 hypre_AuxParCSRMatrix *aux_matrix; 1165 if (!onnz) { 1166 ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr); 1167 for (i=0;i<A->rmap->n;i++) honnz[i] = onz; 1168 } else { 1169 honnz = (HYPRE_Int*)onnz; 1170 } 1171 /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems 1172 they assume the user will input the entire row values, properly sorted 1173 In PETSc, we don't make such an assumption, and we instead set this flag to 1 1174 Also, to avoid possible memory leaks, we destroy and recreate the translator 1175 This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize 1176 the IJ matrix for us */ 1177 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1178 hypre_AuxParCSRMatrixDestroy(aux_matrix); 1179 hypre_IJMatrixTranslator(hA->ij) = NULL; 1180 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz)); 1181 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1182 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 1183 } else { 1184 honnz = NULL; 1185 PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz)); 1186 } 1187 1188 /* reset assembled flag and call the 
initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  /* free the size arrays only if we allocated them above */
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A - the matrix
.  dnz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
   If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  parcsr   - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options, see PetscCopyMode

   Output Parameter:
.  A  - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: exclusive upper bound (hypre's is inclusive) */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: clone the structure and deep-copy the values */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag)*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd)*sizeof(PetscScalar));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Returns the internal hypre_ParCSRMatrix after checking it exists and is ParCSR */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameter:
.  A  - the MATHYPRE object

   Output Parameter:
.  parcsr  - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Scans the diagonal block of the ParCSR matrix for a structurally missing
   diagonal entry; reports the first missing global row index in dd */
static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscInt           rst;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
  ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (missing) *missing = PETSC_FALSE;
  if (dd) *dd = -1;
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt  size,i;
    HYPRE_Int *ii,*jj;

    size = hypre_CSRMatrixNumRows(ha);
    ii   = hypre_CSRMatrixI(ha);
    jj   = hypre_CSRMatrixJ(ha);
    for (i = 0; i < size; i++) {
      PetscInt  j;
      PetscBool found = PETSC_FALSE;

      /* local column index equal to the local row index marks the diagonal */
      for (j = ii[i]; j < ii[i+1] && !found; j++)
        found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

      if (!found) {
        PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
        if (missing) *missing = PETSC_TRUE;
        if (dd) *dd = i+rst;
        PetscFunctionReturn(0);
      }
    }
    if (!size) {
      PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
      if (missing) *missing = PETSC_TRUE;
      if (dd) *dd = rst;
    }
  } else {
    PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
    if (missing) *missing = PETSC_TRUE;
    if (dd) *dd = rst;
  }
  PetscFunctionReturn(0);
}

/* A *= s, scaling the raw CSR value arrays of both diagonal and off-diagonal blocks */
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt    size,i;
    HYPRE_Int   *ii;
    PetscScalar *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    /* ii[size] is the total number of stored nonzeros */
    for (i = 0; i < ii[size]; i++) a[i] *= s;
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt    size,i;
    HYPRE_Int   *ii;
    PetscScalar *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= s;
  }
  PetscFunctionReturn(0);
}

/* Zeroes rows and columns through hypre's EliminateRowsCols; only locally
   owned rows are accepted and the x/b update variant is not implemented */
static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          *lrows;
  PetscInt           rst,ren,i;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b)
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
  /* translate global row indices to local ones; hypre wants local indices */
  for (i=0;i<numRows;i++) {
    if (rows[i] < rst || rows[i] >= ren)
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zeroes the value array of one CSR block (diag or offd); no-op if the block is absent */
static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ha) {
    HYPRE_Int     *ii, size;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);

    if (a) { ierr = PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex));CHKERRQ(ierr); }
  }
  PetscFunctionReturn(0);
}

/* Zeroes all stored entries of the matrix, keeping the nonzero structure */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zeroes the given (local) rows of one CSR block, writing 'diag' on the diagonal
   position when it is stored */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  PetscInt    ii, jj, ibeg, iend, irow;
  PetscInt    *i, *j;
  PetscScalar *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(0);

  /* NOTE(review): casting HYPRE_Int* to PetscInt* assumes equal integer sizes */
  i = (PetscInt*) hypre_CSRMatrixI(hA);
  j = (PetscInt*) hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);

  for (ii = 0; ii < N; ii++) {
    irow = rows[ii];
    ibeg = i[irow];
    iend = i[irow+1];
    for (jj = ibeg; jj < iend; jj++)
      if (j[jj] == irow) a[jj] = diag;
      else a[jj] = 0.0;
  }
  PetscFunctionReturn(0);
}

/* MatZeroRows for MATHYPRE: maps global rows to local ones, then zeroes the
   rows in both blocks; the diagonal value is written only in the diag block */
static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  PetscInt           *lrows,len;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size \n");
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
  /* zero diagonal part */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);CHKERRQ(ierr);
  /* zero off-diagonal part */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);

  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Starts communication of stashed off-process entries */
static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatGetRow through the hypre ParCSR row accessor; must be paired with MatRestoreRow */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
  PetscFunctionReturn(0);
}

/* Releases the row obtained with MatGetRow_HYPRE */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
  PetscFunctionReturn(0);
}

/* MatGetValues via the hypre IJ interface, one row at a time */
static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(0);
  /* Ignore negative row indices
   * And negative column indices should be automatically ignored in hypre
   * */
  for (i=0; i<m; i++)
    if (idxm[i] >= 0) PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,(HYPRE_Int*)&n,(HYPRE_Int*)&idxm[i],(HYPRE_Int*)idxn,&v[i*n]));
  PetscFunctionReturn(0);
}

/* Only MAT_NO_OFF_PROC_ENTRIES is mapped to a hypre option; the rest are ignored */
static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  switch (op)
  {
  case MAT_NO_OFF_PROC_ENTRIES:
    if (flg) {
      PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
    }
    break;
  default:
    break;
  }
  PetscFunctionReturn(0);
}

/* View: non-native formats are rendered through a temporary AIJ wrapper;
   the native format uses hypre's own text file writer */
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;
  Mat                B;
  PetscViewerFormat  format;
  PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;

  PetscFunctionBegin;
  ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
  if (format != PETSC_VIEWER_NATIVE) {
    ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
    ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
    ierr =
MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr); 1682 if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr); 1683 ierr = (*mview)(B,view);CHKERRQ(ierr); 1684 ierr = MatDestroy(&B);CHKERRQ(ierr); 1685 } else { 1686 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1687 PetscMPIInt size; 1688 PetscBool isascii; 1689 const char *filename; 1690 1691 /* HYPRE uses only text files */ 1692 ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1693 if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name); 1694 ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr); 1695 PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); 1696 ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr); 1697 if (size > 1) { 1698 ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr); 1699 } else { 1700 ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr); 1701 } 1702 } 1703 PetscFunctionReturn(0); 1704 } 1705 1706 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B) 1707 { 1708 hypre_ParCSRMatrix *parcsr; 1709 PetscErrorCode ierr; 1710 PetscCopyMode cpmode; 1711 1712 PetscFunctionBegin; 1713 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1714 if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) { 1715 parcsr = hypre_ParCSRMatrixCompleteClone(parcsr); 1716 cpmode = PETSC_OWN_POINTER; 1717 } else { 1718 cpmode = PETSC_COPY_VALUES; 1719 } 1720 ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr); 1721 PetscFunctionReturn(0); 1722 } 1723 1724 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 1725 { 1726 hypre_ParCSRMatrix *acsr,*bcsr; 1727 PetscErrorCode ierr; 1728 1729 PetscFunctionBegin; 1730 if (str == 
SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 1731 ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr); 1732 ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr); 1733 PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); 1734 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1735 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1736 } else { 1737 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1738 } 1739 PetscFunctionReturn(0); 1740 } 1741 1742 /*MC 1743 MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices 1744 based on the hypre IJ interface. 1745 1746 Level: intermediate 1747 1748 .seealso: MatCreate() 1749 M*/ 1750 1751 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B) 1752 { 1753 Mat_HYPRE *hB; 1754 PetscErrorCode ierr; 1755 1756 PetscFunctionBegin; 1757 ierr = PetscNewLog(B,&hB);CHKERRQ(ierr); 1758 hB->inner_free = PETSC_TRUE; 1759 hB->available = PETSC_TRUE; 1760 hB->size = 0; 1761 hB->array = NULL; 1762 1763 B->data = (void*)hB; 1764 B->rmap->bs = 1; 1765 B->assembled = PETSC_FALSE; 1766 1767 ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1768 B->ops->mult = MatMult_HYPRE; 1769 B->ops->multtranspose = MatMultTranspose_HYPRE; 1770 B->ops->setup = MatSetUp_HYPRE; 1771 B->ops->destroy = MatDestroy_HYPRE; 1772 B->ops->assemblyend = MatAssemblyEnd_HYPRE; 1773 B->ops->assemblybegin = MatAssemblyBegin_HYPRE; 1774 B->ops->ptap = MatPtAP_HYPRE_HYPRE; 1775 B->ops->matmult = MatMatMult_HYPRE_HYPRE; 1776 B->ops->setvalues = MatSetValues_HYPRE; 1777 B->ops->missingdiagonal = MatMissingDiagonal_HYPRE; 1778 B->ops->scale = MatScale_HYPRE; 1779 B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE; 1780 B->ops->zeroentries = MatZeroEntries_HYPRE; 1781 B->ops->zerorows = MatZeroRows_HYPRE; 1782 B->ops->getrow = MatGetRow_HYPRE; 1783 B->ops->restorerow = MatRestoreRow_HYPRE; 1784 B->ops->getvalues = MatGetValues_HYPRE; 1785 B->ops->setoption = MatSetOption_HYPRE; 1786 
B->ops->duplicate = MatDuplicate_HYPRE; 1787 B->ops->copy = MatCopy_HYPRE; 1788 B->ops->view = MatView_HYPRE; 1789 1790 /* build cache for off array entries formed */ 1791 ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr); 1792 1793 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr); 1794 ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr); 1795 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr); 1796 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr); 1797 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 1798 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 1799 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr); 1800 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr); 1801 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr); 1802 PetscFunctionReturn(0); 1803 } 1804 1805 static PetscErrorCode hypre_array_destroy(void *ptr) 1806 { 1807 PetscFunctionBegin; 1808 hypre_TFree(ptr,HYPRE_MEMORY_HOST); 1809 PetscFunctionReturn(0); 1810 } 1811