1 #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/ 2 #include <../src/mat/impls/aij/mpi/mpiaij.h> 3 #include <StrumpackSparseSolver.h> 4 5 static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A,Vec v) 6 { 7 PetscFunctionBegin; 8 SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: STRUMPACK factor"); 9 PetscFunctionReturn(0); 10 } 11 12 static PetscErrorCode MatDestroy_STRUMPACK(Mat A) 13 { 14 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)A->spptr; 15 PetscBool flg; 16 17 PetscFunctionBegin; 18 /* Deallocate STRUMPACK storage */ 19 PetscStackCall("STRUMPACK_destroy",STRUMPACK_destroy(S)); 20 PetscCall(PetscFree(A->spptr)); 21 PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg)); 22 if (flg) { 23 PetscCall(MatDestroy_SeqAIJ(A)); 24 } else { 25 PetscCall(MatDestroy_MPIAIJ(A)); 26 } 27 28 /* clear composed functions */ 29 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL)); 30 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetReordering_C",NULL)); 31 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetColPerm_C",NULL)); 32 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSRelTol_C",NULL)); 33 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSAbsTol_C",NULL)); 34 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSMaxRank_C",NULL)); 35 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSLeafSize_C",NULL)); 36 PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSMinSepSize_C",NULL)); 37 38 PetscFunctionReturn(0); 39 } 40 41 static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F,MatSTRUMPACKReordering reordering) 42 { 43 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 44 45 PetscFunctionBegin; 46 PetscStackCall("STRUMPACK_reordering_method",STRUMPACK_set_reordering_method(*S,(STRUMPACK_REORDERING_STRATEGY)reordering)); 47 PetscFunctionReturn(0); 48 } 49 50 /*@ 51 MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering 52 53 Input Parameters: 54 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 55 - reordering - the code to be used to find the fill-reducing reordering 56 Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5 57 58 Options Database: 59 . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None) 60 61 Level: beginner 62 63 References: 64 . * - STRUMPACK manual 65 66 .seealso: `MatGetFactor()` 67 @*/ 68 PetscErrorCode MatSTRUMPACKSetReordering(Mat F,MatSTRUMPACKReordering reordering) 69 { 70 PetscFunctionBegin; 71 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 72 PetscValidLogicalCollectiveEnum(F,reordering,2); 73 PetscTryMethod(F,"MatSTRUMPACKSetReordering_C",(Mat,MatSTRUMPACKReordering),(F,reordering)); 74 PetscFunctionReturn(0); 75 } 76 77 static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F,PetscBool cperm) 78 { 79 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 80 81 PetscFunctionBegin; 82 PetscStackCall("STRUMPACK_set_mc64job",STRUMPACK_set_mc64job(*S,cperm ? 5 : 0)); 83 PetscFunctionReturn(0); 84 } 85 86 /*@ 87 MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal 88 89 Logically Collective on Mat 90 91 Input Parameters: 92 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 93 - cperm - PETSC_TRUE to permute (internally) the columns of the matrix 94 95 Options Database: 96 . -mat_strumpack_colperm <cperm> - true to use the permutation 97 98 Level: beginner 99 100 References: 101 . * - STRUMPACK manual 102 103 .seealso: `MatGetFactor()` 104 @*/ 105 PetscErrorCode MatSTRUMPACKSetColPerm(Mat F,PetscBool cperm) 106 { 107 PetscFunctionBegin; 108 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 109 PetscValidLogicalCollectiveBool(F,cperm,2); 110 PetscTryMethod(F,"MatSTRUMPACKSetColPerm_C",(Mat,PetscBool),(F,cperm)); 111 PetscFunctionReturn(0); 112 } 113 114 static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F,PetscReal rtol) 115 { 116 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 117 118 PetscFunctionBegin; 119 PetscStackCall("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S,rtol)); 120 PetscFunctionReturn(0); 121 } 122 123 /*@ 124 MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression 125 126 Logically Collective on Mat 127 128 Input Parameters: 129 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 130 - rtol - relative compression tolerance 131 132 Options Database: 133 . -mat_strumpack_hss_rel_tol <1e-2> - Relative compression tolerance (None) 134 135 Level: beginner 136 137 References: 138 . * - STRUMPACK manual 139 140 .seealso: `MatGetFactor()` 141 @*/ 142 PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F,PetscReal rtol) 143 { 144 PetscFunctionBegin; 145 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 146 PetscValidLogicalCollectiveReal(F,rtol,2); 147 PetscTryMethod(F,"MatSTRUMPACKSetHSSRelTol_C",(Mat,PetscReal),(F,rtol)); 148 PetscFunctionReturn(0); 149 } 150 151 static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F,PetscReal atol) 152 { 153 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 154 155 PetscFunctionBegin; 156 PetscStackCall("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S,atol)); 157 PetscFunctionReturn(0); 158 } 159 160 /*@ 161 MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression 162 163 Logically Collective on Mat 164 165 Input Parameters: 166 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 167 - atol - absolute compression tolerance 168 169 Options Database: 170 . -mat_strumpack_hss_abs_tol <1e-8> - Absolute compression tolerance (None) 171 172 Level: beginner 173 174 References: 175 . * - STRUMPACK manual 176 177 .seealso: `MatGetFactor()` 178 @*/ 179 PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F,PetscReal atol) 180 { 181 PetscFunctionBegin; 182 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 183 PetscValidLogicalCollectiveReal(F,atol,2); 184 PetscTryMethod(F,"MatSTRUMPACKSetHSSAbsTol_C",(Mat,PetscReal),(F,atol)); 185 PetscFunctionReturn(0); 186 } 187 188 static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F,PetscInt hssmaxrank) 189 { 190 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 191 192 PetscFunctionBegin; 193 PetscStackCall("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S,hssmaxrank)); 194 PetscFunctionReturn(0); 195 } 196 197 /*@ 198 MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank 199 200 Logically Collective on Mat 201 202 Input Parameters: 203 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 204 - hssmaxrank - maximum rank used in low-rank approximation 205 206 Options Database: 207 . -mat_strumpack_max_rank - Maximum rank in HSS compression, when using pctype ilu (None) 208 209 Level: beginner 210 211 References: 212 . * - STRUMPACK manual 213 214 .seealso: `MatGetFactor()` 215 @*/ 216 PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F,PetscInt hssmaxrank) 217 { 218 PetscFunctionBegin; 219 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 220 PetscValidLogicalCollectiveInt(F,hssmaxrank,2); 221 PetscTryMethod(F,"MatSTRUMPACKSetHSSMaxRank_C",(Mat,PetscInt),(F,hssmaxrank)); 222 PetscFunctionReturn(0); 223 } 224 225 static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F,PetscInt leaf_size) 226 { 227 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 228 229 PetscFunctionBegin; 230 PetscStackCall("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S,leaf_size)); 231 PetscFunctionReturn(0); 232 } 233 234 /*@ 235 MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size 236 237 Logically Collective on Mat 238 239 Input Parameters: 240 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 241 - leaf_size - Size of diagonal blocks in HSS approximation 242 243 Options Database: 244 . -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using pctype ilu (None) 245 246 Level: beginner 247 248 References: 249 . * - STRUMPACK manual 250 251 .seealso: `MatGetFactor()` 252 @*/ 253 PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F,PetscInt leaf_size) 254 { 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 257 PetscValidLogicalCollectiveInt(F,leaf_size,2); 258 PetscTryMethod(F,"MatSTRUMPACKSetHSSLeafSize_C",(Mat,PetscInt),(F,leaf_size)); 259 PetscFunctionReturn(0); 260 } 261 262 static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F,PetscInt hssminsize) 263 { 264 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 265 266 PetscFunctionBegin; 267 PetscStackCall("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S,hssminsize)); 268 PetscFunctionReturn(0); 269 } 270 271 /*@ 272 MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation 273 274 Logically Collective on Mat 275 276 Input Parameters: 277 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface 278 - hssminsize - minimum dense matrix size for low-rank approximation 279 280 Options Database: 281 . -mat_strumpack_hss_min_sep_size <hssminsize> - set the minimum separator size 282 283 Level: beginner 284 285 References: 286 . * - STRUMPACK manual 287 288 .seealso: `MatGetFactor()` 289 @*/ 290 PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F,PetscInt hssminsize) 291 { 292 PetscFunctionBegin; 293 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 294 PetscValidLogicalCollectiveInt(F,hssminsize,2); 295 PetscTryMethod(F,"MatSTRUMPACKSetHSSMinSepSize_C",(Mat,PetscInt),(F,hssminsize)); 296 PetscFunctionReturn(0); 297 } 298 299 static PetscErrorCode MatSolve_STRUMPACK(Mat A,Vec b_mpi,Vec x) 300 { 301 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)A->spptr; 302 STRUMPACK_RETURN_CODE sp_err; 303 const PetscScalar *bptr; 304 PetscScalar *xptr; 305 306 PetscFunctionBegin; 307 PetscCall(VecGetArray(x,&xptr)); 308 PetscCall(VecGetArrayRead(b_mpi,&bptr)); 309 310 PetscStackCall("STRUMPACK_solve",sp_err = STRUMPACK_solve(*S,(PetscScalar*)bptr,xptr,0)); 311 switch (sp_err) { 312 case STRUMPACK_SUCCESS: break; 313 case STRUMPACK_MATRIX_NOT_SET: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix was not set"); break; } 314 case STRUMPACK_REORDERING_ERROR: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix reordering failed"); break; } 315 default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: solve failed"); 316 } 317 PetscCall(VecRestoreArray(x,&xptr)); 318 PetscCall(VecRestoreArrayRead(b_mpi,&bptr)); 319 PetscFunctionReturn(0); 320 } 321 322 static PetscErrorCode MatMatSolve_STRUMPACK(Mat A,Mat B_mpi,Mat X) 323 { 324 PetscBool flg; 325 326 PetscFunctionBegin; 327 PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL)); 328 PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix"); 329 PetscCall(PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL)); 330 PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix"); 331 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatMatSolve_STRUMPACK() is not implemented yet"); 332 PetscFunctionReturn(0); 333 } 334 335 static PetscErrorCode MatView_Info_STRUMPACK(Mat A,PetscViewer viewer) 336 { 337 PetscFunctionBegin; 338 /* check if matrix is strumpack type */ 339 if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(0); 340 PetscCall(PetscViewerASCIIPrintf(viewer,"STRUMPACK sparse solver!\n")); 341 PetscFunctionReturn(0); 342 } 343 344 static PetscErrorCode MatView_STRUMPACK(Mat A,PetscViewer viewer) 345 { 346 PetscBool iascii; 347 PetscViewerFormat format; 348 349 PetscFunctionBegin; 350 PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii)); 351 if (iascii) { 352 PetscCall(PetscViewerGetFormat(viewer,&format)); 353 if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A,viewer)); 354 } 355 PetscFunctionReturn(0); 356 } 357 358 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F,Mat A,const MatFactorInfo *info) 359 { 360 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 361 STRUMPACK_RETURN_CODE sp_err; 362 Mat_SeqAIJ *A_d,*A_o; 363 Mat_MPIAIJ *mat; 364 PetscInt M=A->rmap->N,m=A->rmap->n; 365 PetscBool flg; 366 367 PetscFunctionBegin; 368 PetscCall(PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg)); 369 if (flg) { /* A is MATMPIAIJ */ 370 mat = (Mat_MPIAIJ*)A->data; 371 A_d = (Mat_SeqAIJ*)(mat->A)->data; 372 A_o = (Mat_SeqAIJ*)(mat->B)->data; 373 PetscStackCall("STRUMPACK_set_MPIAIJ_matrix",STRUMPACK_set_MPIAIJ_matrix(*S,&m,A_d->i,A_d->j,A_d->a,A_o->i,A_o->j,A_o->a,mat->garray)); 374 } else { /* A is MATSEQAIJ */ 375 A_d = (Mat_SeqAIJ*)A->data; 376 PetscStackCall("STRUMPACK_set_csr_matrix",STRUMPACK_set_csr_matrix(*S,&M,A_d->i,A_d->j,A_d->a,0)); 377 } 378 379 /* Reorder and Factor the matrix. */ 380 /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */ 381 PetscStackCall("STRUMPACK_reorder",sp_err = STRUMPACK_reorder(*S)); 382 PetscStackCall("STRUMPACK_factor",sp_err = STRUMPACK_factor(*S)); 383 switch (sp_err) { 384 case STRUMPACK_SUCCESS: break; 385 case STRUMPACK_MATRIX_NOT_SET: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix was not set"); break; } 386 case STRUMPACK_REORDERING_ERROR: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix reordering failed"); break; } 387 default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: factorization failed"); 388 } 389 F->assembled = PETSC_TRUE; 390 F->preallocated = PETSC_TRUE; 391 PetscFunctionReturn(0); 392 } 393 394 static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 395 { 396 STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr; 397 PetscBool flg,set; 398 PetscReal ctol; 399 PetscInt hssminsize,max_rank,leaf_size; 400 STRUMPACK_REORDERING_STRATEGY ndcurrent,ndvalue; 401 STRUMPACK_KRYLOV_SOLVER itcurrent,itsolver; 402 const char *const STRUMPACKNDTypes[] = {"NATURAL","METIS","PARMETIS","SCOTCH","PTSCOTCH","RCM","STRUMPACKNDTypes","",0}; 403 const char *const SolverTypes[] = {"AUTO","NONE","REFINE","PREC_GMRES","GMRES","PREC_BICGSTAB","BICGSTAB","SolverTypes","",0}; 404 405 PetscFunctionBegin; 406 /* Set options to F */ 407 PetscOptionsBegin(PetscObjectComm((PetscObject)F),((PetscObject)F)->prefix,"STRUMPACK Options","Mat"); 408 PetscStackCall("STRUMPACK_HSS_rel_tol",ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S)); 409 PetscCall(PetscOptionsReal("-mat_strumpack_hss_rel_tol","Relative compression tolerance","None",ctol,&ctol,&set)); 410 if (set) PetscStackCall("STRUMPACK_set_HSS_rel_tol",STRUMPACK_set_HSS_rel_tol(*S,(double)ctol)); 411 412 PetscStackCall("STRUMPACK_HSS_abs_tol",ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S)); 413 PetscCall(PetscOptionsReal("-mat_strumpack_hss_abs_tol","Absolute compression tolerance","None",ctol,&ctol,&set)); 414 if (set) PetscStackCall("STRUMPACK_set_HSS_abs_tol",STRUMPACK_set_HSS_abs_tol(*S,(double)ctol)); 415 416 PetscStackCall("STRUMPACK_mc64job",flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE); 417 PetscCall(PetscOptionsBool("-mat_strumpack_colperm","Find a col perm to get nonzero diagonal","None",flg,&flg,&set)); 418 if (set) PetscStackCall("STRUMPACK_set_mc64job",STRUMPACK_set_mc64job(*S,flg ? 5 : 0)); 419 420 PetscStackCall("STRUMPACK_HSS_min_sep_size",hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S)); 421 PetscCall(PetscOptionsInt("-mat_strumpack_hss_min_sep_size","Minimum size of separator for HSS compression","None",hssminsize,&hssminsize,&set)); 422 if (set) PetscStackCall("STRUMPACK_set_HSS_min_sep_size",STRUMPACK_set_HSS_min_sep_size(*S,(int)hssminsize)); 423 424 PetscStackCall("STRUMPACK_HSS_max_rank",max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S)); 425 PetscCall(PetscOptionsInt("-mat_strumpack_max_rank","Maximum rank in HSS approximation","None",max_rank,&max_rank,&set)); 426 if (set) PetscStackCall("STRUMPACK_set_HSS_max_rank",STRUMPACK_set_HSS_max_rank(*S,(int)max_rank)); 427 428 PetscStackCall("STRUMPACK_HSS_leaf_size",leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S)); 429 PetscCall(PetscOptionsInt("-mat_strumpack_leaf_size","Size of diagonal blocks in HSS approximation","None",leaf_size,&leaf_size,&set)); 430 if (set) PetscStackCall("STRUMPACK_set_HSS_leaf_size",STRUMPACK_set_HSS_leaf_size(*S,(int)leaf_size)); 431 432 PetscStackCall("STRUMPACK_reordering_method",ndcurrent = STRUMPACK_reordering_method(*S)); 433 PetscCall(PetscOptionsEnum("-mat_strumpack_reordering","Sparsity reducing matrix reordering","None",STRUMPACKNDTypes,(PetscEnum)ndcurrent,(PetscEnum*)&ndvalue,&set)); 434 if (set) PetscStackCall("STRUMPACK_set_reordering_method",STRUMPACK_set_reordering_method(*S,ndvalue)); 435 436 /* Disable the outer iterative solver from STRUMPACK. */ 437 /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement. */ 438 /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling */ 439 /* low-rank compression), it will use it's own preconditioned GMRES. Here we can disable */ 440 /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP. */ 441 PetscStackCall("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT)); 442 443 PetscStackCall("STRUMPACK_Krylov_solver",itcurrent = STRUMPACK_Krylov_solver(*S)); 444 PetscCall(PetscOptionsEnum("-mat_strumpack_iterative_solver","Select iterative solver from STRUMPACK","None",SolverTypes,(PetscEnum)itcurrent,(PetscEnum*)&itsolver,&set)); 445 if (set) PetscStackCall("STRUMPACK_set_Krylov_solver",STRUMPACK_set_Krylov_solver(*S,itsolver)); 446 PetscOptionsEnd(); 447 448 F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK; 449 F->ops->solve = MatSolve_STRUMPACK; 450 F->ops->matsolve = MatMatSolve_STRUMPACK; 451 PetscFunctionReturn(0); 452 } 453 454 static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A,MatSolverType *type) 455 { 456 PetscFunctionBegin; 457 *type = MATSOLVERSTRUMPACK; 458 PetscFunctionReturn(0); 459 } 460 461 /*MC 462 MATSOLVERSSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU) 463 and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK. 464 465 Consult the STRUMPACK-sparse manual for more info. 466 467 Use 468 ./configure --download-strumpack 469 to have PETSc installed with STRUMPACK 470 471 Use 472 -pc_type lu -pc_factor_mat_solver_type strumpack 473 to use this as an exact (direct) solver, use 474 -pc_type ilu -pc_factor_mat_solver_type strumpack 475 to enable low-rank compression (i.e, use as a preconditioner). 476 477 Works with AIJ matrices 478 479 Options Database Keys: 480 + -mat_strumpack_verbose - verbose info 481 . -mat_strumpack_hss_rel_tol <1e-2> - Relative compression tolerance (None) 482 . -mat_strumpack_hss_abs_tol <1e-8> - Absolute compression tolerance (None) 483 . -mat_strumpack_colperm <TRUE> - Permute matrix to make diagonal nonzeros (None) 484 . -mat_strumpack_hss_min_sep_size <256> - Minimum size of separator for HSS compression (None) 485 . -mat_strumpack_max_rank - Maximum rank in HSS compression, when using pctype ilu (None) 486 . -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using pctype ilu (None) 487 . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None) 488 - -mat_strumpack_iterative_solver <DIRECT> - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None) 489 490 Level: beginner 491 492 .seealso: `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType` 493 M*/ 494 static PetscErrorCode MatGetFactor_aij_strumpack(Mat A,MatFactorType ftype,Mat *F) 495 { 496 Mat B; 497 PetscInt M=A->rmap->N,N=A->cmap->N; 498 PetscBool verb,flg; 499 STRUMPACK_SparseSolver *S; 500 STRUMPACK_INTERFACE iface; 501 const STRUMPACK_PRECISION table[2][2][2] = 502 {{{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, 503 {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}}, 504 {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX}, 505 {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}}}; 506 const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt)==8)?0:1][(PETSC_SCALAR==PETSC_COMPLEX)?0:1][(PETSC_REAL==PETSC_FLOAT)?0:1]; 507 508 PetscFunctionBegin; 509 /* Create the factorization matrix */ 510 PetscCall(MatCreate(PetscObjectComm((PetscObject)A),&B)); 511 PetscCall(MatSetSizes(B,A->rmap->n,A->cmap->n,M,N)); 512 PetscCall(MatSetType(B,((PetscObject)A)->type_name)); 513 PetscCall(MatSeqAIJSetPreallocation(B,0,NULL)); 514 PetscCall(MatMPIAIJSetPreallocation(B,0,NULL,0,NULL)); 515 B->trivialsymbolic = PETSC_TRUE; 516 if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) { 517 B->ops->lufactorsymbolic = MatLUFactorSymbolic_STRUMPACK; 518 B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK; 519 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported"); 520 B->ops->view = MatView_STRUMPACK; 521 B->ops->destroy = MatDestroy_STRUMPACK; 522 B->ops->getdiagonal = MatGetDiagonal_STRUMPACK; 523 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_strumpack)); 524 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetReordering_C",MatSTRUMPACKSetReordering_STRUMPACK)); 525 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetColPerm_C",MatSTRUMPACKSetColPerm_STRUMPACK)); 526 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSRelTol_C",MatSTRUMPACKSetHSSRelTol_STRUMPACK)); 527 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSAbsTol_C",MatSTRUMPACKSetHSSAbsTol_STRUMPACK)); 528 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSMaxRank_C",MatSTRUMPACKSetHSSMaxRank_STRUMPACK)); 529 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSLeafSize_C",MatSTRUMPACKSetHSSLeafSize_STRUMPACK)); 530 PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSMinSepSize_C",MatSTRUMPACKSetHSSMinSepSize_STRUMPACK)); 531 B->factortype = ftype; 532 PetscCall(PetscNewLog(B,&S)); 533 B->spptr = S; 534 535 PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg)); 536 iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST; 537 538 PetscOptionsBegin(PetscObjectComm((PetscObject)B),((PetscObject)B)->prefix,"STRUMPACK Options","Mat"); 539 verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE; 540 PetscCall(PetscOptionsBool("-mat_strumpack_verbose","Print STRUMPACK information","None",verb,&verb,NULL)); 541 542 PetscStackCall("STRUMPACK_init",STRUMPACK_init(S,PetscObjectComm((PetscObject)A),prec,iface,0,NULL,verb)); 543 544 if (ftype == MAT_FACTOR_ILU) { 545 /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete */ 546 /* (or approximate) LU factorization. */ 547 PetscStackCall("STRUMPACK_enable_HSS",STRUMPACK_enable_HSS(*S)); 548 } 549 PetscOptionsEnd(); 550 551 /* set solvertype */ 552 PetscCall(PetscFree(B->solvertype)); 553 PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK,&B->solvertype)); 554 555 *F = B; 556 PetscFunctionReturn(0); 557 } 558 559 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void) 560 { 561 PetscFunctionBegin; 562 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_strumpack)); 563 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_strumpack)); 564 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATMPIAIJ,MAT_FACTOR_ILU,MatGetFactor_aij_strumpack)); 565 PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATSEQAIJ,MAT_FACTOR_ILU,MatGetFactor_aij_strumpack)); 566 PetscFunctionReturn(0); 567 } 568