xref: /petsc/src/mat/impls/aij/mpi/strumpack/strumpack.c (revision c69effb2e05caa58810396c101d616d7ad9da0a7)
1 #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3 #include <StrumpackSparseSolver.h>
4 
/* Extracting the diagonal of a STRUMPACK factored matrix is not supported; always raises PETSC_ERR_SUP */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  PetscFunctionReturn(PETSC_SUCCESS);
}
11 
/* Destroys a STRUMPACK factored matrix: releases the solver created by STRUMPACK_init(),
   frees the PetscNew()'d handle stored in A->data, and removes all composed setter functions */
static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;

  PetscFunctionBegin;
  /* Deallocate STRUMPACK storage */
  PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
  PetscCall(PetscFree(A->data));

  /* clear composed functions (the same set composed in MatGetFactor_aij_strumpack()) */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSRelTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSAbsTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMaxRank_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSLeafSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMinSepSize_C", NULL));

  PetscFunctionReturn(PETSC_SUCCESS);
}
33 
/* Type-specific implementation behind MatSTRUMPACKSetReordering(); relies on the
   MatSTRUMPACKReordering values matching STRUMPACK_REORDERING_STRATEGY for the cast */
static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
42 
/*@
  MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  reordering - the code to be used to find the fill-reducing reordering
      Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5

  Options Database Key:
.   -mat_strumpack_reordering <METIS>  - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(F, reordering, 2);
  /* silently a no-op unless F is a STRUMPACK factor (PetscTryMethod only calls composed methods) */
  PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
69 
/* Type-specific implementation behind MatSTRUMPACKSetColPerm().
   MC64 job 5 enables the column permutation, job 0 disables it
   (exact MC64 job semantics: see the STRUMPACK/MC64 documentation) */
static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, cperm ? 5 : 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
78 
/*@
  MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix

  Options Database Key:
.   -mat_strumpack_colperm <cperm> - true to use the permutation

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(F, cperm, 2);
  /* silently a no-op unless F is a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
106 
/* Type-specific implementation behind MatSTRUMPACKSetHSSRelTol() */
static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F, PetscReal rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
115 
/*@
  MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression

  Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  rtol - relative compression tolerance

  Options Database Key:
.   -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F, PetscReal rtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, rtol, 2);
  /* silently a no-op unless F is a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSRelTol_C", (Mat, PetscReal), (F, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
143 
/* Type-specific implementation behind MatSTRUMPACKSetHSSAbsTol() */
static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F, PetscReal atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
152 
/*@
  MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  atol - absolute compression tolerance

  Options Database Key:
.   -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F, PetscReal atol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveReal(F, atol, 2);
  /* silently a no-op unless F is a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSAbsTol_C", (Mat, PetscReal), (F, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
180 
/* Type-specific implementation behind MatSTRUMPACKSetHSSMaxRank() */
static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F, PetscInt hssmaxrank)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, hssmaxrank));
  PetscFunctionReturn(PETSC_SUCCESS);
}
189 
/*@
  MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  hssmaxrank - maximum rank used in low-rank approximation

  Options Database Key:
.   -mat_strumpack_max_rank - Maximum rank in HSS compression, when using pctype ilu (None)

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F, PetscInt hssmaxrank)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, hssmaxrank, 2);
  /* silently a no-op unless F is a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSMaxRank_C", (Mat, PetscInt), (F, hssmaxrank));
  PetscFunctionReturn(PETSC_SUCCESS);
}
217 
/* Type-specific implementation behind MatSTRUMPACKSetHSSLeafSize() */
static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
226 
/*@
  MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  leaf_size - Size of diagonal blocks in HSS approximation

  Options Database Key:
.   -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F, PetscInt leaf_size)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, leaf_size, 2);
  /* silently a no-op unless F is a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSLeafSize_C", (Mat, PetscInt), (F, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
254 
/* Type-specific implementation behind MatSTRUMPACKSetHSSMinSepSize() */
static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F, PetscInt hssminsize)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, hssminsize));
  PetscFunctionReturn(PETSC_SUCCESS);
}
263 
/*@
  MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation

   Logically Collective

   Input Parameters:
+  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
-  hssminsize - minimum dense matrix size for low-rank approximation

  Options Database Key:
.   -mat_strumpack_hss_min_sep_size <hssminsize> - set the minimum separator size

   Level: beginner

   References:
.  * - STRUMPACK manual

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F, PetscInt hssminsize)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(F, hssminsize, 2);
  /* silently a no-op unless F is a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetHSSMinSepSize_C", (Mat, PetscInt), (F, hssminsize));
  PetscFunctionReturn(PETSC_SUCCESS);
}
291 
292 static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
293 {
294   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
295   STRUMPACK_RETURN_CODE   sp_err;
296   const PetscScalar      *bptr;
297   PetscScalar            *xptr;
298 
299   PetscFunctionBegin;
300   PetscCall(VecGetArray(x, &xptr));
301   PetscCall(VecGetArrayRead(b_mpi, &bptr));
302 
303   PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
304   switch (sp_err) {
305   case STRUMPACK_SUCCESS:
306     break;
307   case STRUMPACK_MATRIX_NOT_SET: {
308     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
309     break;
310   }
311   case STRUMPACK_REORDERING_ERROR: {
312     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
313     break;
314   }
315   default:
316     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
317   }
318   PetscCall(VecRestoreArray(x, &xptr));
319   PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
320   PetscFunctionReturn(PETSC_SUCCESS);
321 }
322 
/* Multi-RHS solve stub: validates that B and X are dense, then raises PETSC_ERR_SUP
   because the STRUMPACK interface does not implement MatMatSolve yet.
   The unconditional SETERRQ before the final return is deliberate. */
static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
  SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatMatSolve_STRUMPACK() is not implemented yet");
  PetscFunctionReturn(PETSC_SUCCESS);
}
335 
336 static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
337 {
338   PetscFunctionBegin;
339   /* check if matrix is strumpack type */
340   if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(PETSC_SUCCESS);
341   PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
342   PetscFunctionReturn(PETSC_SUCCESS);
343 }
344 
345 static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
346 {
347   PetscBool         iascii;
348   PetscViewerFormat format;
349 
350   PetscFunctionBegin;
351   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
352   if (iascii) {
353     PetscCall(PetscViewerGetFormat(viewer, &format));
354     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
355   }
356   PetscFunctionReturn(PETSC_SUCCESS);
357 }
358 
359 static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
360 {
361   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
362   STRUMPACK_RETURN_CODE   sp_err;
363   Mat_SeqAIJ             *A_d, *A_o;
364   Mat_MPIAIJ             *mat;
365   PetscInt                M = A->rmap->N, m = A->rmap->n;
366   PetscBool               flg;
367   const PetscScalar      *A_d_a, *A_o_a;
368 
369   PetscFunctionBegin;
370   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
371   if (flg) { /* A might be MATMPIAIJCUSPARSE etc */
372     mat = (Mat_MPIAIJ *)A->data;
373     PetscCall(MatSeqAIJGetArrayRead(mat->A, &A_d_a)); /* Make sure mat is sync'ed to host */
374     PetscCall(MatSeqAIJGetArrayRead(mat->B, &A_o_a));
375     A_d = (Mat_SeqAIJ *)(mat->A)->data;
376     A_o = (Mat_SeqAIJ *)(mat->B)->data;
377     PetscStackCallExternalVoid("STRUMPACK_set_MPIAIJ_matrix", STRUMPACK_set_MPIAIJ_matrix(*S, &m, A_d->i, A_d->j, A_d_a, A_o->i, A_o->j, A_o_a, mat->garray));
378     PetscCall(MatSeqAIJRestoreArrayRead(mat->A, &A_d_a));
379     PetscCall(MatSeqAIJRestoreArrayRead(mat->B, &A_o_a));
380   } else { /* A might be MATSEQAIJCUSPARSE etc */
381     PetscCall(MatSeqAIJGetArrayRead(A, &A_d_a));
382     A_d = (Mat_SeqAIJ *)A->data;
383     PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, A_d->i, A_d->j, A_d_a, 0));
384     PetscCall(MatSeqAIJRestoreArrayRead(A, &A_d_a));
385   }
386 
387   /* Reorder and Factor the matrix. */
388   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
389   PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
390   PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
391   switch (sp_err) {
392   case STRUMPACK_SUCCESS:
393     break;
394   case STRUMPACK_MATRIX_NOT_SET: {
395     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
396     break;
397   }
398   case STRUMPACK_REORDERING_ERROR: {
399     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
400     break;
401   }
402   default:
403     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
404   }
405   F->assembled    = PETSC_TRUE;
406   F->preallocated = PETSC_TRUE;
407   PetscFunctionReturn(PETSC_SUCCESS);
408 }
409 
/* Symbolic phase of the (I)LU factorization: processes the -mat_strumpack_* options
   into the STRUMPACK solver object and installs the numeric/solve callbacks on F.
   The r, c, and info arguments are ignored: STRUMPACK computes its own ordering
   (see MatLUFactorNumeric_STRUMPACK(), which calls STRUMPACK_reorder). */
static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  STRUMPACK_SparseSolver       *S = (STRUMPACK_SparseSolver *)F->data;
  PetscBool                     flg, set;
  PetscReal                     ctol;
  PetscInt                      hssminsize, max_rank, leaf_size;
  STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
  STRUMPACK_KRYLOV_SOLVER       itcurrent, itsolver;
  /* name lists for PetscOptionsEnum(); trailing entries follow the PetscEnum list convention */
  const char *const             STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "STRUMPACKNDTypes", "", 0};
  const char *const             SolverTypes[]      = {"AUTO", "NONE", "REFINE", "PREC_GMRES", "GMRES", "PREC_BICGSTAB", "BICGSTAB", "SolverTypes", "", 0};

  PetscFunctionBegin;
  /* Set options to F; for each option the current STRUMPACK value is queried first
     so it serves as the default, and STRUMPACK is updated only when the user set it */
  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "STRUMPACK Options", "Mat");
  PetscStackCallExternalVoid("STRUMPACK_HSS_rel_tol", ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S));
  PetscCall(PetscOptionsReal("-mat_strumpack_hss_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, (double)ctol));

  PetscStackCallExternalVoid("STRUMPACK_HSS_abs_tol", ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S));
  PetscCall(PetscOptionsReal("-mat_strumpack_hss_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, (double)ctol));

  /* mc64job != 0 means a column permutation is enabled; job 5 requests it, 0 disables it */
  PetscStackCallExternalVoid("STRUMPACK_mc64job", flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
  PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, flg ? 5 : 0));

  PetscStackCallExternalVoid("STRUMPACK_HSS_min_sep_size", hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_hss_min_sep_size", "Minimum size of separator for HSS compression", "None", hssminsize, &hssminsize, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, (int)hssminsize));

  PetscStackCallExternalVoid("STRUMPACK_HSS_max_rank", max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_max_rank", "Maximum rank in HSS approximation", "None", max_rank, &max_rank, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, (int)max_rank));

  PetscStackCallExternalVoid("STRUMPACK_HSS_leaf_size", leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S));
  PetscCall(PetscOptionsInt("-mat_strumpack_leaf_size", "Size of diagonal blocks in HSS approximation", "None", leaf_size, &leaf_size, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, (int)leaf_size));

  PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
  PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));

  /* Disable the outer iterative solver from STRUMPACK.                                       */
  /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
  /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
  /* low-rank compression), it will use its own preconditioned GMRES. Here we can disable     */
  /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
  /* NOTE: this is set BEFORE the query below so STRUMPACK_DIRECT is the default the user     */
  /* option -mat_strumpack_iterative_solver can override.                                     */
  PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));

  PetscStackCallExternalVoid("STRUMPACK_Krylov_solver", itcurrent = STRUMPACK_Krylov_solver(*S));
  PetscCall(PetscOptionsEnum("-mat_strumpack_iterative_solver", "Select iterative solver from STRUMPACK", "None", SolverTypes, (PetscEnum)itcurrent, (PetscEnum *)&itsolver, &set));
  if (set) PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, itsolver));
  PetscOptionsEnd();

  /* install the numeric-factor and solve callbacks used after this symbolic phase */
  F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
  F->ops->solve           = MatSolve_STRUMPACK;
  F->ops->matsolve        = MatMatSolve_STRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
469 
/* Composed as "MatFactorGetSolverType_C"; reports this factor's solver package name */
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
476 
477 /*MC
  MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
479   and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.
480 
481   Consult the STRUMPACK-sparse manual for more info.
482 
483   Use
484      ./configure --download-strumpack
485   to have PETSc installed with STRUMPACK
486 
487   Use
488     -pc_type lu -pc_factor_mat_solver_type strumpack
489   to use this as an exact (direct) solver, use
490     -pc_type ilu -pc_factor_mat_solver_type strumpack
  to enable low-rank compression (i.e., use as a preconditioner).
492 
493   Works with AIJ matrices
494 
495   Options Database Keys:
496 + -mat_strumpack_verbose                    - verbose info
497 . -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
498 . -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
499 . -mat_strumpack_colperm <TRUE>             - Permute matrix to make diagonal nonzeros (None)
500 . -mat_strumpack_hss_min_sep_size <256>     - Minimum size of separator for HSS compression (None)
501 . -mat_strumpack_max_rank                   - Maximum rank in HSS compression, when using pctype ilu (None)
502 . -mat_strumpack_leaf_size                  - Size of diagonal blocks in HSS approximation, when using pctype ilu (None)
503 . -mat_strumpack_reordering <METIS>         - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
504 - -mat_strumpack_iterative_solver <DIRECT>  - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None)
505 
506  Level: beginner
507 
508 .seealso: `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
509 M*/
/* Creates the STRUMPACK factored matrix B for A (MAT_FACTOR_LU or MAT_FACTOR_ILU):
   sets up the Mat shell, composes the MatSTRUMPACKSet* setters, allocates and
   initializes the STRUMPACK solver handle (stored in B->data, freed in
   MatDestroy_STRUMPACK()), and for ILU enables HSS compression so the
   factorization becomes approximate. */
static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
{
  Mat                       B;
  PetscInt                  M = A->rmap->N, N = A->cmap->N;
  PetscBool                 verb, flg;
  STRUMPACK_SparseSolver   *S;
  STRUMPACK_INTERFACE       iface;
  /* precision table indexed by [64-bit ints?][complex?][single precision?]; see prec below */
  const STRUMPACK_PRECISION table[2][2][2] = {
    {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
    {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},       {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}      }
  };
  /* select the STRUMPACK precision matching PETSc's integer size, scalar type, and real precision */
  const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];

  PetscFunctionBegin;
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
  PetscCall(PetscStrallocpy("strumpack", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));
  B->trivialsymbolic = PETSC_TRUE;
  if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
    /* LU and ILU share the same symbolic routine; ILU differs only via HSS compression below */
    B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
    B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
  B->ops->getinfo     = MatGetInfo_External;
  B->ops->view        = MatView_STRUMPACK;
  B->ops->destroy     = MatDestroy_STRUMPACK;
  B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSRelTol_C", MatSTRUMPACKSetHSSRelTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSAbsTol_C", MatSTRUMPACKSetHSSAbsTol_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMaxRank_C", MatSTRUMPACKSetHSSMaxRank_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSLeafSize_C", MatSTRUMPACKSetHSSLeafSize_STRUMPACK));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMinSepSize_C", MatSTRUMPACKSetHSSMinSepSize_STRUMPACK));
  B->factortype = ftype;
  /* the STRUMPACK solver handle lives in B->data; released by MatDestroy_STRUMPACK() */
  PetscCall(PetscNew(&S));
  B->data = S;

  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg)); /* A might be MATSEQAIJCUSPARSE */
  iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;

  PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
  /* default verbosity follows PETSc's -info state */
  verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));

  PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));

  if (ftype == MAT_FACTOR_ILU) {
    /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete              */
    /* (or approximate) LU factorization.                                                     */
    PetscStackCallExternalVoid("STRUMPACK_enable_HSS", STRUMPACK_enable_HSS(*S));
  }
  PetscOptionsEnd();

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}
573 
574 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
575 {
576   PetscFunctionBegin;
577   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
578   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
579   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
580   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
581   PetscFunctionReturn(PETSC_SUCCESS);
582 }
583