xref: /petsc/src/mat/impls/aij/mpi/strumpack/strumpack.c (revision c3b5f7ba6bc5ce25a01a67bb37ba5d34b02bbbd7)
1 #include <../src/mat/impls/aij/seq/aij.h>            /*I "petscmat.h" I*/
2 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3 #include <StrumpackSparseSolver.h>
4 
/* MatGetDiagonal_STRUMPACK - extracting the diagonal of a STRUMPACK factored
   matrix is not supported, so this stub always raises PETSC_ERR_SUP. It is
   installed as B->ops->getdiagonal in MatGetFactor_aij_strumpack(). */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: STRUMPACK factor");
  PetscFunctionReturn(0); /* unreachable: SETERRQ above returns the error code */
}
11 
12 static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
13 {
14   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)A->spptr;
15   PetscBool              flg;
16 
17   PetscFunctionBegin;
18   /* Deallocate STRUMPACK storage */
19   PetscStackCall("STRUMPACK_destroy",STRUMPACK_destroy(S));
20   PetscCall(PetscFree(A->spptr));
21   PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg));
22   if (flg) {
23     PetscCall(MatDestroy_SeqAIJ(A));
24   } else {
25     PetscCall(MatDestroy_MPIAIJ(A));
26   }
27 
28   /* clear composed functions */
29   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL));
30   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetReordering_C",NULL));
31   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetColPerm_C",NULL));
32   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSRelTol_C",NULL));
33   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSAbsTol_C",NULL));
34   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSMaxRank_C",NULL));
35   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSLeafSize_C",NULL));
36   PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSMinSepSize_C",NULL));
37 
38   PetscFunctionReturn(0);
39 }
40 
41 static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F,MatSTRUMPACKReordering reordering)
42 {
43   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
44 
45   PetscFunctionBegin;
46   PetscStackCall("STRUMPACK_reordering_method",STRUMPACK_set_reordering_method(*S,(STRUMPACK_REORDERING_STRATEGY)reordering));
47   PetscFunctionReturn(0);
48 }
49 
50 /*@
  MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering

   Logically Collective on Mat

   Input Parameters:
54 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
55 -  reordering - the code to be used to find the fill-reducing reordering
56       Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5
57 
58   Options Database:
59 .   -mat_strumpack_reordering <METIS>  - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
60 
61    Level: beginner
62 
63    References:
64 .  * - STRUMPACK manual
65 
66 .seealso: MatGetFactor()
67 @*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F,MatSTRUMPACKReordering reordering)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(F,reordering,2);
  /* Dispatch to the STRUMPACK-specific implementation if F is a STRUMPACK
     factor; PetscTryMethod() is a no-op for any other factor type. */
  PetscTryMethod(F,"MatSTRUMPACKSetReordering_C",(Mat,MatSTRUMPACKReordering),(F,reordering));
  PetscFunctionReturn(0);
}
76 
77 static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F,PetscBool cperm)
78 {
79   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
80 
81   PetscFunctionBegin;
82   PetscStackCall("STRUMPACK_set_mc64job",STRUMPACK_set_mc64job(*S,cperm ? 5 : 0));
83   PetscFunctionReturn(0);
84 }
85 
86 /*@
87   MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal
88 
89    Logically Collective on Mat
90 
91    Input Parameters:
92 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
93 -  cperm - PETSC_TRUE to permute (internally) the columns of the matrix
94 
95   Options Database:
96 .   -mat_strumpack_colperm <cperm> - true to use the permutation
97 
98    Level: beginner
99 
100    References:
101 .  * - STRUMPACK manual
102 
103 .seealso: MatGetFactor()
104 @*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F,PetscBool cperm)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveBool(F,cperm,2);
  /* Forward to the STRUMPACK-specific implementation; no-op for other factor types */
  PetscTryMethod(F,"MatSTRUMPACKSetColPerm_C",(Mat,PetscBool),(F,cperm));
  PetscFunctionReturn(0);
}
113 
114 static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F,PetscReal rtol)
115 {
116   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
117 
118   PetscFunctionBegin;
119   PetscStackCall("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S,rtol));
120   PetscFunctionReturn(0);
121 }
122 
123 /*@
124   MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression
125 
126   Logically Collective on Mat
127 
128    Input Parameters:
129 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
130 -  rtol - relative compression tolerance
131 
132   Options Database:
133 .   -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
134 
135    Level: beginner
136 
137    References:
138 .  * - STRUMPACK manual
139 
140 .seealso: MatGetFactor()
141 @*/
PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F,PetscReal rtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveReal(F,rtol,2);
  /* Forward to the STRUMPACK-specific implementation; no-op for other factor types */
  PetscTryMethod(F,"MatSTRUMPACKSetHSSRelTol_C",(Mat,PetscReal),(F,rtol));
  PetscFunctionReturn(0);
}
150 
151 static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F,PetscReal atol)
152 {
153   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
154 
155   PetscFunctionBegin;
156   PetscStackCall("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S,atol));
157   PetscFunctionReturn(0);
158 }
159 
160 /*@
161   MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression
162 
163    Logically Collective on Mat
164 
165    Input Parameters:
166 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
167 -  atol - absolute compression tolerance
168 
169   Options Database:
170 .   -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
171 
172    Level: beginner
173 
174    References:
175 .  * - STRUMPACK manual
176 
177 .seealso: MatGetFactor()
178 @*/
PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F,PetscReal atol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveReal(F,atol,2);
  /* Forward to the STRUMPACK-specific implementation; no-op for other factor types */
  PetscTryMethod(F,"MatSTRUMPACKSetHSSAbsTol_C",(Mat,PetscReal),(F,atol));
  PetscFunctionReturn(0);
}
187 
188 static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F,PetscInt hssmaxrank)
189 {
190   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
191 
192   PetscFunctionBegin;
193   PetscStackCall("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S,hssmaxrank));
194   PetscFunctionReturn(0);
195 }
196 
197 /*@
198   MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank
199 
200    Logically Collective on Mat
201 
202    Input Parameters:
203 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
204 -  hssmaxrank - maximum rank used in low-rank approximation
205 
206   Options Database:
207 .   -mat_strumpack_max_rank    - Maximum rank in HSS compression, when using pctype ilu (None)
208 
209    Level: beginner
210 
211    References:
212 .  * - STRUMPACK manual
213 
214 .seealso: MatGetFactor()
215 @*/
PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F,PetscInt hssmaxrank)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveInt(F,hssmaxrank,2);
  /* Forward to the STRUMPACK-specific implementation; no-op for other factor types */
  PetscTryMethod(F,"MatSTRUMPACKSetHSSMaxRank_C",(Mat,PetscInt),(F,hssmaxrank));
  PetscFunctionReturn(0);
}
224 
225 static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F,PetscInt leaf_size)
226 {
227   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
228 
229   PetscFunctionBegin;
230   PetscStackCall("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S,leaf_size));
231   PetscFunctionReturn(0);
232 }
233 
234 /*@
235   MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size
236 
237    Logically Collective on Mat
238 
239    Input Parameters:
240 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
241 -  leaf_size - Size of diagonal blocks in HSS approximation
242 
243   Options Database:
244 .   -mat_strumpack_leaf_size    - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
245 
246    Level: beginner
247 
248    References:
249 .  * - STRUMPACK manual
250 
251 .seealso: MatGetFactor()
252 @*/
PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F,PetscInt leaf_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveInt(F,leaf_size,2);
  /* Forward to the STRUMPACK-specific implementation; no-op for other factor types */
  PetscTryMethod(F,"MatSTRUMPACKSetHSSLeafSize_C",(Mat,PetscInt),(F,leaf_size));
  PetscFunctionReturn(0);
}
261 
262 static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F,PetscInt hssminsize)
263 {
264   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
265 
266   PetscFunctionBegin;
267   PetscStackCall("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S,hssminsize));
268   PetscFunctionReturn(0);
269 }
270 
271 /*@
272   MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation
273 
274    Logically Collective on Mat
275 
276    Input Parameters:
277 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
278 -  hssminsize - minimum dense matrix size for low-rank approximation
279 
280   Options Database:
281 .   -mat_strumpack_hss_min_sep_size <hssminsize> - set the minimum separator size
282 
283    Level: beginner
284 
285    References:
286 .  * - STRUMPACK manual
287 
288 .seealso: MatGetFactor()
289 @*/
PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F,PetscInt hssminsize)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  PetscValidLogicalCollectiveInt(F,hssminsize,2);
  /* Forward to the STRUMPACK-specific implementation; no-op for other factor types */
  PetscTryMethod(F,"MatSTRUMPACKSetHSSMinSepSize_C",(Mat,PetscInt),(F,hssminsize));
  PetscFunctionReturn(0);
}
298 
299 static PetscErrorCode MatSolve_STRUMPACK(Mat A,Vec b_mpi,Vec x)
300 {
301   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)A->spptr;
302   STRUMPACK_RETURN_CODE  sp_err;
303   const PetscScalar      *bptr;
304   PetscScalar            *xptr;
305 
306   PetscFunctionBegin;
307   PetscCall(VecGetArray(x,&xptr));
308   PetscCall(VecGetArrayRead(b_mpi,&bptr));
309 
310   PetscStackCall("STRUMPACK_solve",sp_err = STRUMPACK_solve(*S,(PetscScalar*)bptr,xptr,0));
311   switch (sp_err) {
312   case STRUMPACK_SUCCESS: break;
313   case STRUMPACK_MATRIX_NOT_SET:   { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix was not set"); break; }
314   case STRUMPACK_REORDERING_ERROR: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix reordering failed"); break; }
315   default:                           SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: solve failed");
316   }
317   PetscCall(VecRestoreArray(x,&xptr));
318   PetscCall(VecRestoreArrayRead(b_mpi,&bptr));
319   PetscFunctionReturn(0);
320 }
321 
322 static PetscErrorCode MatMatSolve_STRUMPACK(Mat A,Mat B_mpi,Mat X)
323 {
324   PetscBool flg;
325 
326   PetscFunctionBegin;
327   PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL));
328   PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
329   PetscCall(PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL));
330   PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
331   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatMatSolve_STRUMPACK() is not implemented yet");
332   PetscFunctionReturn(0);
333 }
334 
335 static PetscErrorCode MatView_Info_STRUMPACK(Mat A,PetscViewer viewer)
336 {
337   PetscFunctionBegin;
338   /* check if matrix is strumpack type */
339   if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(0);
340   PetscCall(PetscViewerASCIIPrintf(viewer,"STRUMPACK sparse solver!\n"));
341   PetscFunctionReturn(0);
342 }
343 
344 static PetscErrorCode MatView_STRUMPACK(Mat A,PetscViewer viewer)
345 {
346   PetscBool         iascii;
347   PetscViewerFormat format;
348 
349   PetscFunctionBegin;
350   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
351   if (iascii) {
352     PetscCall(PetscViewerGetFormat(viewer,&format));
353     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A,viewer));
354   }
355   PetscFunctionReturn(0);
356 }
357 
358 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F,Mat A,const MatFactorInfo *info)
359 {
360   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
361   STRUMPACK_RETURN_CODE  sp_err;
362   Mat_SeqAIJ             *A_d,*A_o;
363   Mat_MPIAIJ             *mat;
364   PetscInt               M=A->rmap->N,m=A->rmap->n;
365   PetscBool              flg;
366 
367   PetscFunctionBegin;
368   PetscCall(PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg));
369   if (flg) { /* A is MATMPIAIJ */
370     mat = (Mat_MPIAIJ*)A->data;
371     A_d = (Mat_SeqAIJ*)(mat->A)->data;
372     A_o = (Mat_SeqAIJ*)(mat->B)->data;
373     PetscStackCall("STRUMPACK_set_MPIAIJ_matrix",STRUMPACK_set_MPIAIJ_matrix(*S,&m,A_d->i,A_d->j,A_d->a,A_o->i,A_o->j,A_o->a,mat->garray));
374   } else { /* A is MATSEQAIJ */
375     A_d = (Mat_SeqAIJ*)A->data;
376     PetscStackCall("STRUMPACK_set_csr_matrix",STRUMPACK_set_csr_matrix(*S,&M,A_d->i,A_d->j,A_d->a,0));
377   }
378 
379   /* Reorder and Factor the matrix. */
380   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
381   PetscStackCall("STRUMPACK_reorder",sp_err = STRUMPACK_reorder(*S));
382   PetscStackCall("STRUMPACK_factor",sp_err = STRUMPACK_factor(*S));
383   switch (sp_err) {
384   case STRUMPACK_SUCCESS: break;
385   case STRUMPACK_MATRIX_NOT_SET:   { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix was not set"); break; }
386   case STRUMPACK_REORDERING_ERROR: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix reordering failed"); break; }
387   default:                           SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: factorization failed");
388   }
389   F->assembled    = PETSC_TRUE;
390   F->preallocated = PETSC_TRUE;
391   PetscFunctionReturn(0);
392 }
393 
/* MatLUFactorSymbolic_STRUMPACK - symbolic phase of the factorization.
   STRUMPACK performs its own symbolic analysis inside STRUMPACK_reorder(),
   so this only installs the numeric-factor and solve entry points on F.
   The r, c, and info arguments are unused by this interface. */
static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
  F->ops->solve           = MatSolve_STRUMPACK;
  F->ops->matsolve        = MatMatSolve_STRUMPACK;
  PetscFunctionReturn(0);
}
402 
/* MatFactorGetSolverType_aij_strumpack - report MATSOLVERSTRUMPACK as the
   solver package backing this factor; composed as "MatFactorGetSolverType_C". */
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(0);
}
409 
410 /*MC
  MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
  and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.
413 
414   Consult the STRUMPACK-sparse manual for more info.
415 
416   Use
417      ./configure --download-strumpack
418   to have PETSc installed with STRUMPACK
419 
420   Use
421     -pc_type lu -pc_factor_mat_solver_type strumpack
422   to use this as an exact (direct) solver, use
423     -pc_type ilu -pc_factor_mat_solver_type strumpack
424   to enable low-rank compression (i.e, use as a preconditioner).
425 
426   Works with AIJ matrices
427 
428   Options Database Keys:
429 + -mat_strumpack_verbose                    - verbose info
430 . -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
431 . -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
432 . -mat_strumpack_colperm <TRUE>             - Permute matrix to make diagonal nonzeros (None)
433 . -mat_strumpack_hss_min_sep_size <256>     - Minimum size of separator for HSS compression (None)
434 . -mat_strumpack_max_rank                   - Maximum rank in HSS compression, when using pctype ilu (None)
435 . -mat_strumpack_leaf_size                  - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
436 . -mat_strumpack_reordering <METIS>         - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
437 - -mat_strumpack_iterative_solver <DIRECT>  - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None)
438 
439  Level: beginner
440 
441 .seealso: PCLU, PCILU, MATSOLVERSUPERLU_DIST, MATSOLVERMUMPS, PCFactorSetMatSolverType(), MatSolverType
442 M*/
/* MatGetFactor_aij_strumpack - create a STRUMPACK factor matrix B for A.
   Sets up the factor's operation table and composed setter functions,
   allocates and initializes the STRUMPACK solver object in B->spptr, and
   processes all -mat_strumpack_* options. Supports MAT_FACTOR_LU (direct)
   and MAT_FACTOR_ILU (HSS-compressed approximate factorization). */
static PetscErrorCode MatGetFactor_aij_strumpack(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                           B;
  PetscInt                      M=A->rmap->N,N=A->cmap->N;
  PetscBool                     verb,flg,set;
  PetscReal                     ctol;
  PetscInt                      hssminsize,max_rank,leaf_size;
  STRUMPACK_SparseSolver        *S;
  STRUMPACK_INTERFACE           iface;
  STRUMPACK_REORDERING_STRATEGY ndcurrent,ndvalue;
  STRUMPACK_KRYLOV_SOLVER       itcurrent,itsolver;
  /* Precision table indexed by [64-bit ints?][complex?][single precision?];
     selects the STRUMPACK precision matching PETSc's build configuration */
  const STRUMPACK_PRECISION     table[2][2][2] =
    {{{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64},
      {STRUMPACK_FLOAT_64,        STRUMPACK_DOUBLE_64}},
     {{STRUMPACK_FLOATCOMPLEX,    STRUMPACK_DOUBLECOMPLEX},
      {STRUMPACK_FLOAT,           STRUMPACK_DOUBLE}}};
  const STRUMPACK_PRECISION     prec = table[(sizeof(PetscInt)==8)?0:1][(PETSC_SCALAR==PETSC_COMPLEX)?0:1][(PETSC_REAL==PETSC_FLOAT)?0:1];
  const char *const             STRUMPACKNDTypes[] = {"NATURAL","METIS","PARMETIS","SCOTCH","PTSCOTCH","RCM","STRUMPACKNDTypes","",0};
  /* NOTE(review): entry 1 here is "NONE" while the options documentation above
     lists "DIRECT" for -mat_strumpack_iterative_solver; confirm this string
     list matches the STRUMPACK_KRYLOV_SOLVER enum ordering. */
  const char *const             SolverTypes[] = {"AUTO","NONE","REFINE","PREC_GMRES","GMRES","PREC_BICGSTAB","BICGSTAB","SolverTypes","",0};
  PetscErrorCode                ierr;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A),&B));
  PetscCall(MatSetSizes(B,A->rmap->n,A->cmap->n,M,N));
  PetscCall(MatSetType(B,((PetscObject)A)->type_name));
  PetscCall(MatSeqAIJSetPreallocation(B,0,NULL));
  PetscCall(MatMPIAIJSetPreallocation(B,0,NULL,0,NULL));
  B->trivialsymbolic = PETSC_TRUE;
  /* Both LU and ILU share the same symbolic routine; STRUMPACK decides between
     exact and compressed factorization via the HSS flag set below */
  if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
    B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
    B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported");
  B->ops->view        = MatView_STRUMPACK;
  B->ops->destroy     = MatDestroy_STRUMPACK;
  B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
  /* Compose the setters so MatSTRUMPACKSetXXX() can reach this factor;
     cleared again in MatDestroy_STRUMPACK() */
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_strumpack));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetReordering_C",MatSTRUMPACKSetReordering_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetColPerm_C",MatSTRUMPACKSetColPerm_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSRelTol_C",MatSTRUMPACKSetHSSRelTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSAbsTol_C",MatSTRUMPACKSetHSSAbsTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSMaxRank_C",MatSTRUMPACKSetHSSMaxRank_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSLeafSize_C",MatSTRUMPACKSetHSSLeafSize_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSMinSepSize_C",MatSTRUMPACKSetHSSMinSepSize_STRUMPACK));
  B->factortype = ftype;
  /* The STRUMPACK solver handle lives in B->spptr; freed in MatDestroy_STRUMPACK() */
  PetscCall(PetscNewLog(B,&S));
  B->spptr = S;

  /* Sequential matrices use the multithreaded interface, parallel ones the
     MPI-distributed interface */
  PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg));
  iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;

  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"STRUMPACK Options","Mat");PetscCall(ierr);

  /* Default verbosity follows -info (PetscLogPrintInfo) */
  verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_strumpack_verbose","Print STRUMPACK information","None",verb,&verb,NULL));

  PetscStackCall("STRUMPACK_init",STRUMPACK_init(S,PetscObjectComm((PetscObject)A),prec,iface,0,NULL,verb));

  /* For each tunable below: query STRUMPACK's current default, offer it as the
     option default, and write it back only if the user set the option */
  PetscStackCall("STRUMPACK_HSS_rel_tol",ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S));
  PetscCall(PetscOptionsReal("-mat_strumpack_hss_rel_tol","Relative compression tolerance","None",ctol,&ctol,&set));
  if (set) PetscStackCall("STRUMPACK_set_HSS_rel_tol",STRUMPACK_set_HSS_rel_tol(*S,(double)ctol));

  PetscStackCall("STRUMPACK_HSS_abs_tol",ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S));
  PetscCall(PetscOptionsReal("-mat_strumpack_hss_abs_tol","Absolute compression tolerance","None",ctol,&ctol,&set));
  if (set) PetscStackCall("STRUMPACK_set_HSS_abs_tol",STRUMPACK_set_HSS_abs_tol(*S,(double)ctol));

  /* mc64job 0 = no column permutation, 5 = permute for a nonzero diagonal */
  PetscStackCall("STRUMPACK_mc64job",flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
  PetscCall(PetscOptionsBool("-mat_strumpack_colperm","Find a col perm to get nonzero diagonal","None",flg,&flg,&set));
  if (set) PetscStackCall("STRUMPACK_set_mc64job",STRUMPACK_set_mc64job(*S,flg ? 5 : 0));

  PetscStackCall("STRUMPACK_HSS_min_sep_size",hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_hss_min_sep_size","Minimum size of separator for HSS compression","None",hssminsize,&hssminsize,&set));
  if (set) PetscStackCall("STRUMPACK_set_HSS_min_sep_size",STRUMPACK_set_HSS_min_sep_size(*S,(int)hssminsize));

  PetscStackCall("STRUMPACK_HSS_max_rank",max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_max_rank","Maximum rank in HSS approximation","None",max_rank,&max_rank,&set));
  if (set) PetscStackCall("STRUMPACK_set_HSS_max_rank",STRUMPACK_set_HSS_max_rank(*S,(int)max_rank));

  PetscStackCall("STRUMPACK_HSS_leaf_size",leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_leaf_size","Size of diagonal blocks in HSS approximation","None",leaf_size,&leaf_size,&set));
  if (set) PetscStackCall("STRUMPACK_set_HSS_leaf_size",STRUMPACK_set_HSS_leaf_size(*S,(int)leaf_size));

  PetscStackCall("STRUMPACK_reordering_method",ndcurrent = STRUMPACK_reordering_method(*S));
  PetscCall(PetscOptionsEnum("-mat_strumpack_reordering","Sparsity reducing matrix reordering","None",STRUMPACKNDTypes,(PetscEnum)ndcurrent,(PetscEnum*)&ndvalue,&set));
  if (set) PetscStackCall("STRUMPACK_set_reordering_method",STRUMPACK_set_reordering_method(*S,ndvalue));

  if (ftype == MAT_FACTOR_ILU) {
    /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete              */
    /* (or approximate) LU factorization.                                                     */
    PetscStackCall("STRUMPACK_enable_HSS",STRUMPACK_enable_HSS(*S));
  }

  /* Disable the outer iterative solver from STRUMPACK.                                       */
  /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
  /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
  /* low-rank compression), it will use it's own preconditioned GMRES. Here we can disable    */
  /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
  PetscStackCall("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));

  /* Allow the user to override the Krylov solver chosen above */
  PetscStackCall("STRUMPACK_Krylov_solver",itcurrent = STRUMPACK_Krylov_solver(*S));
  PetscCall(PetscOptionsEnum("-mat_strumpack_iterative_solver","Select iterative solver from STRUMPACK","None",SolverTypes,(PetscEnum)itcurrent,(PetscEnum*)&itsolver,&set));
  if (set) PetscStackCall("STRUMPACK_set_Krylov_solver",STRUMPACK_set_Krylov_solver(*S,itsolver));

  ierr = PetscOptionsEnd();PetscCall(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
551 
552 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
553 {
554   PetscFunctionBegin;
555   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_strumpack));
556   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_strumpack));
557   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATMPIAIJ,MAT_FACTOR_ILU,MatGetFactor_aij_strumpack));
558   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATSEQAIJ,MAT_FACTOR_ILU,MatGetFactor_aij_strumpack));
559   PetscFunctionReturn(0);
560 }
561