xref: /petsc/src/mat/impls/aij/seq/mkl_pardiso/mkl_pardiso.c (revision 8564c97f3fe574910f676ffb31bf76fa44548916)
1 #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/sbaij/seq/sbaij.h>
3 #include <../src/mat/impls/dense/seq/dense.h>
4 
5 #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
6   #define MKL_ILP64
7 #endif
8 #include <mkl_pardiso.h>
9 
10 PETSC_EXTERN void PetscSetMKL_PARDISOThreads(int);
11 
12 /*
13  *  Possible mkl_pardiso phases that control the execution of the solver.
14  *  For more information, check the mkl_pardiso manual.
15  */
16 #define JOB_ANALYSIS                                                    11
17 #define JOB_ANALYSIS_NUMERICAL_FACTORIZATION                            12
18 #define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
19 #define JOB_NUMERICAL_FACTORIZATION                                     22
20 #define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT          23
21 #define JOB_SOLVE_ITERATIVE_REFINEMENT                                  33
22 #define JOB_SOLVE_FORWARD_SUBSTITUTION                                  331
23 #define JOB_SOLVE_DIAGONAL_SUBSTITUTION                                 332
24 #define JOB_SOLVE_BACKWARD_SUBSTITUTION                                 333
25 #define JOB_RELEASE_OF_LU_MEMORY                                        0
26 #define JOB_RELEASE_OF_ALL_MEMORY                                       -1
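
/*
 *  Typical life cycle as driven by the routines in this file: JOB_ANALYSIS during the symbolic
 *  factorization, JOB_NUMERICAL_FACTORIZATION during the numeric factorization,
 *  JOB_SOLVE_ITERATIVE_REFINEMENT (or the forward/backward substitution phases when a Schur
 *  complement is in use) during the solve, and JOB_RELEASE_OF_ALL_MEMORY when the factor is destroyed.
 */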
27 
28 #define IPARM_SIZE 64
29 
30 #if defined(PETSC_USE_64BIT_INDICES)
31   #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
32     #define INT_TYPE         long long int
33     #define MKL_PARDISO      pardiso
34     #define MKL_PARDISO_INIT pardisoinit
35   #else
36     /* this is the case where the MKL BLAS/LAPACK use 32-bit integers but the 64-bit integer version
37      of the PARDISO code is used; hence the need for the 64 below */
38     #define INT_TYPE         long long int
39     #define MKL_PARDISO      pardiso_64
40     #define MKL_PARDISO_INIT pardiso_64init
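/* MKL provides pardiso_64() but, in this configuration, no matching 64-bit pardisoinit(), so define
   a small shim that calls the 32-bit pardisoinit() and widens the returned iparm values to INT_TYPE. */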
41 void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm[])
42 {
43   int iparm_copy[IPARM_SIZE], mtype_copy, i;
44 
45   mtype_copy = *mtype;
46   pardisoinit(pt, &mtype_copy, iparm_copy);
47   for (i = 0; i < IPARM_SIZE; i++) iparm[i] = iparm_copy[i];
48 }
49   #endif
50 #else
51   #define INT_TYPE         int
52   #define MKL_PARDISO      pardiso
53   #define MKL_PARDISO_INIT pardisoinit
54 #endif
55 
56 /*
57    Internal data structure.
58  */
59 typedef struct {
60   /* Configuration vector*/
61   INT_TYPE iparm[IPARM_SIZE];
62 
63   /*
64      Internal MKL PARDISO memory location.
65      After the first call to MKL PARDISO do not modify pt, as that could cause a serious memory leak.
66    */
67   void *pt[IPARM_SIZE];
68 
69   /* Basic MKL PARDISO info */
70   INT_TYPE phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;
71 
72   /* Matrix structure*/
73   void     *a;
74   INT_TYPE *ia, *ja;
75 
76   /* Number of non-zero elements*/
77   INT_TYPE nz;
78 
79   /* Row permutation vector */
80   INT_TYPE *perm;
81 
82   /* Whether the matrix preserves its nonzero structure between factorizations (MatStructure) */
83   MatStructure matstruc;
84 
85   PetscBool needsym;
86   PetscBool freeaij;
87 
88   /* Schur complement */
89   PetscScalar *schur;
90   PetscInt     schur_size;
91   PetscInt    *schur_idxs;
92   PetscScalar *schur_work;
93   PetscBLASInt schur_work_size;
94   PetscBool    solve_interior;
95 
96   /* True if MKL PARDISO functions have been used. */
97   PetscBool CleanUp;
98 
99   /* Conversion to a format suitable for MKL */
100   PetscErrorCode (*Convert)(Mat, PetscBool, MatReuse, PetscBool *, INT_TYPE *, INT_TYPE **, INT_TYPE **, PetscScalar **);
101 } Mat_MKL_PARDISO;
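
/* Note: iparm is indexed with the 0-based C convention; expressions such as iparm[12 - 1] in the
   routines below refer to parameter iparm(12) of the 1-based MKL PARDISO documentation. */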
102 
103 static PetscErrorCode MatMKLPardiso_Convert_seqsbaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v)
104 {
105   Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ *)A->data;
106   PetscInt      bs = A->rmap->bs, i;
107 
108   PetscFunctionBegin;
109   PetscCheck(sym, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "This should not happen");
110   *v = aa->a;
111   if (bs == 1) { /* already in the correct format */
112     /* though PetscInt and INT_TYPE have the same size, they are declared as different types, so the Intel compiler requires a cast */
113     *r    = (INT_TYPE *)aa->i;
114     *c    = (INT_TYPE *)aa->j;
115     *nnz  = (INT_TYPE)aa->nz;
116     *free = PETSC_FALSE;
117   } else if (reuse == MAT_INITIAL_MATRIX) {
118     PetscInt  m = A->rmap->n, nz = aa->nz;
119     PetscInt *row, *col;
120     PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
121     for (i = 0; i < m + 1; i++) row[i] = aa->i[i] + 1;
122     for (i = 0; i < nz; i++) col[i] = aa->j[i] + 1;
123     *r    = (INT_TYPE *)row;
124     *c    = (INT_TYPE *)col;
125     *nnz  = (INT_TYPE)nz;
126     *free = PETSC_TRUE;
127   }
128   PetscFunctionReturn(PETSC_SUCCESS);
129 }
130 
131 static PetscErrorCode MatMKLPardiso_Convert_seqbaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v)
132 {
133   Mat_SeqBAIJ *aa = (Mat_SeqBAIJ *)A->data;
134   PetscInt     bs = A->rmap->bs, i;
135 
136   PetscFunctionBegin;
137   if (!sym) {
138     *v = aa->a;
139     if (bs == 1) { /* already in the correct format */
140       /* though PetscInt and INT_TYPE have the same size, they are declared as different types, so the Intel compiler requires a cast */
141       *r    = (INT_TYPE *)aa->i;
142       *c    = (INT_TYPE *)aa->j;
143       *nnz  = (INT_TYPE)aa->nz;
144       *free = PETSC_FALSE;
145       PetscFunctionReturn(PETSC_SUCCESS);
146     } else if (reuse == MAT_INITIAL_MATRIX) {
147       PetscInt  m = A->rmap->n, nz = aa->nz;
148       PetscInt *row, *col;
149       PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
150       for (i = 0; i < m + 1; i++) row[i] = aa->i[i] + 1;
151       for (i = 0; i < nz; i++) col[i] = aa->j[i] + 1;
152       *r   = (INT_TYPE *)row;
153       *c   = (INT_TYPE *)col;
154       *nnz = (INT_TYPE)nz;
155     }
156     *free = PETSC_TRUE;
157   } else {
158     SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "This should not happen");
159   }
160   PetscFunctionReturn(PETSC_SUCCESS);
161 }
162 
163 static PetscErrorCode MatMKLPardiso_Convert_seqaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v)
164 {
165   Mat_SeqAIJ  *aa = (Mat_SeqAIJ *)A->data;
166   PetscScalar *aav;
167 
168   PetscFunctionBegin;
169   PetscCall(MatSeqAIJGetArrayRead(A, (const PetscScalar **)&aav));
170   if (!sym) { /* already in the correct format */
171     *v    = aav;
172     *r    = (INT_TYPE *)aa->i;
173     *c    = (INT_TYPE *)aa->j;
174     *nnz  = (INT_TYPE)aa->nz;
175     *free = PETSC_FALSE;
176   } else if (reuse == MAT_INITIAL_MATRIX) { /* need to get the triangular part */
177     PetscScalar *vals, *vv;
178     PetscInt    *row, *col, *jj;
179     PetscInt     m = A->rmap->n, nz, i;
180 
181     nz = 0;
182     for (i = 0; i < m; i++) nz += aa->i[i + 1] - aa->diag[i];
183     PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
184     PetscCall(PetscMalloc1(nz, &vals));
185     jj = col;
186     vv = vals;
187 
188     row[0] = 0;
189     for (i = 0; i < m; i++) {
190       PetscInt    *aj = aa->j + aa->diag[i];
191       PetscScalar *av = aav + aa->diag[i];
192       PetscInt     rl = aa->i[i + 1] - aa->diag[i], j;
193 
194       for (j = 0; j < rl; j++) {
195         *jj = *aj;
196         jj++;
197         aj++;
198         *vv = *av;
199         vv++;
200         av++;
201       }
202       row[i + 1] = row[i] + rl;
203     }
204     *v    = vals;
205     *r    = (INT_TYPE *)row;
206     *c    = (INT_TYPE *)col;
207     *nnz  = (INT_TYPE)nz;
208     *free = PETSC_TRUE;
209   } else {
210     PetscScalar *vv;
211     PetscInt     m = A->rmap->n, i;
212 
213     vv = *v;
214     for (i = 0; i < m; i++) {
215       PetscScalar *av = aav + aa->diag[i];
216       PetscInt     rl = aa->i[i + 1] - aa->diag[i], j;
217       for (j = 0; j < rl; j++) {
218         *vv = *av;
219         vv++;
220         av++;
221       }
222     }
223     *free = PETSC_TRUE;
224   }
225   PetscCall(MatSeqAIJRestoreArrayRead(A, (const PetscScalar **)&aav));
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 
229 static PetscErrorCode MatMKLPardisoSolveSchur_Private(Mat F, PetscScalar *B, PetscScalar *X)
230 {
231   Mat_MKL_PARDISO     *mpardiso = (Mat_MKL_PARDISO *)F->data;
232   Mat                  S, Xmat, Bmat;
233   MatFactorSchurStatus schurstatus;
234 
235   PetscFunctionBegin;
236   PetscCall(MatFactorGetSchurComplement(F, &S, &schurstatus));
237   PetscCheck(X != B || schurstatus != MAT_FACTOR_SCHUR_INVERTED, PETSC_COMM_SELF, PETSC_ERR_SUP, "X and B cannot point to the same address");
238   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, mpardiso->schur_size, mpardiso->nrhs, B, &Bmat));
239   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, mpardiso->schur_size, mpardiso->nrhs, X, &Xmat));
240   PetscCall(MatSetType(Bmat, ((PetscObject)S)->type_name));
241   PetscCall(MatSetType(Xmat, ((PetscObject)S)->type_name));
242 #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
243   PetscCall(MatBindToCPU(Xmat, S->boundtocpu));
244   PetscCall(MatBindToCPU(Bmat, S->boundtocpu));
245 #endif
246 
247 #if defined(PETSC_USE_COMPLEX)
248   PetscCheck(mpardiso->iparm[12 - 1] != 1, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Hermitian solve not implemented yet");
249 #endif
250 
251   switch (schurstatus) {
252   case MAT_FACTOR_SCHUR_FACTORED:
253     if (!mpardiso->iparm[12 - 1]) {
254       PetscCall(MatMatSolve(S, Bmat, Xmat));
255     } else { /* transpose solve */
256       PetscCall(MatMatSolveTranspose(S, Bmat, Xmat));
257     }
258     break;
259   case MAT_FACTOR_SCHUR_INVERTED:
260     PetscCall(MatProductCreateWithMat(S, Bmat, NULL, Xmat));
261     if (!mpardiso->iparm[12 - 1]) {
262       PetscCall(MatProductSetType(Xmat, MATPRODUCT_AB));
263     } else { /* transpose solve */
264       PetscCall(MatProductSetType(Xmat, MATPRODUCT_AtB));
265     }
266     PetscCall(MatProductSetFromOptions(Xmat));
267     PetscCall(MatProductSymbolic(Xmat));
268     PetscCall(MatProductNumeric(Xmat));
269     PetscCall(MatProductClear(Xmat));
270     break;
271   default:
272     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %" PetscInt_FMT, F->schur_status);
273     break;
274   }
275   PetscCall(MatFactorRestoreSchurComplement(F, &S, schurstatus));
276   PetscCall(MatDestroy(&Bmat));
277   PetscCall(MatDestroy(&Xmat));
278   PetscFunctionReturn(PETSC_SUCCESS);
279 }
280 
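/* Called via MatFactorSetSchurIS(). A sketch of the user-side sequence: obtain F with MatGetFactor(),
   call MatFactorSetSchurIS(F, is) with a sorted index set before the symbolic factorization, factor,
   and then access the Schur complement through MatFactorGetSchurComplement() and related routines. */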
281 static PetscErrorCode MatFactorSetSchurIS_MKL_PARDISO(Mat F, IS is)
282 {
283   Mat_MKL_PARDISO   *mpardiso = (Mat_MKL_PARDISO *)F->data;
284   const PetscScalar *arr;
285   const PetscInt    *idxs;
286   PetscInt           size, i;
287   PetscMPIInt        csize;
288   PetscBool          sorted;
289 
290   PetscFunctionBegin;
291   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)F), &csize));
292   PetscCheck(csize <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "MKL PARDISO parallel Schur complements not yet supported from PETSc");
293   PetscCall(ISSorted(is, &sorted));
294   PetscCheck(sorted, PETSC_COMM_SELF, PETSC_ERR_SUP, "IS for MKL PARDISO Schur complements needs to be sorted");
295   PetscCall(ISGetLocalSize(is, &size));
296   PetscCall(PetscFree(mpardiso->schur_work));
297   PetscCall(PetscBLASIntCast(PetscMax(mpardiso->n, 2 * size), &mpardiso->schur_work_size));
298   PetscCall(PetscMalloc1(mpardiso->schur_work_size, &mpardiso->schur_work));
299   PetscCall(MatDestroy(&F->schur));
300   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, size, size, NULL, &F->schur));
301   PetscCall(MatDenseGetArrayRead(F->schur, &arr));
302   mpardiso->schur      = (PetscScalar *)arr;
303   mpardiso->schur_size = size;
304   PetscCall(MatDenseRestoreArrayRead(F->schur, &arr));
305   if (mpardiso->mtype == 2) PetscCall(MatSetOption(F->schur, MAT_SPD, PETSC_TRUE));
306 
307   PetscCall(PetscFree(mpardiso->schur_idxs));
308   PetscCall(PetscMalloc1(size, &mpardiso->schur_idxs));
309   PetscCall(PetscArrayzero(mpardiso->perm, mpardiso->n));
310   PetscCall(ISGetIndices(is, &idxs));
311   PetscCall(PetscArraycpy(mpardiso->schur_idxs, idxs, size));
312   for (i = 0; i < size; i++) mpardiso->perm[idxs[i]] = 1;
313   PetscCall(ISRestoreIndices(is, &idxs));
314   if (size) { /* turn on Schur switch if the set of indices is not empty */
315     mpardiso->iparm[36 - 1] = 2;
316   }
317   PetscFunctionReturn(PETSC_SUCCESS);
318 }
319 
320 static PetscErrorCode MatDestroy_MKL_PARDISO(Mat A)
321 {
322   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
323 
324   PetscFunctionBegin;
325   if (mat_mkl_pardiso->CleanUp) {
326     mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;
327 
328     MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, NULL, NULL, NULL, NULL, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL,
329                 &mat_mkl_pardiso->err);
330   }
331   PetscCall(PetscFree(mat_mkl_pardiso->perm));
332   PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
333   PetscCall(PetscFree(mat_mkl_pardiso->schur_idxs));
334   if (mat_mkl_pardiso->freeaij) {
335     PetscCall(PetscFree2(mat_mkl_pardiso->ia, mat_mkl_pardiso->ja));
336     if (mat_mkl_pardiso->iparm[34] == 1) PetscCall(PetscFree(mat_mkl_pardiso->a));
337   }
338   PetscCall(PetscFree(A->data));
339 
340   /* clear composed functions */
341   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
342   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorSetSchurIS_C", NULL));
343   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMkl_PardisoSetCntl_C", NULL));
344   PetscFunctionReturn(PETSC_SUCCESS);
345 }
346 
347 static PetscErrorCode MatMKLPardisoScatterSchur_Private(Mat_MKL_PARDISO *mpardiso, PetscScalar *whole, PetscScalar *schur, PetscBool reduce)
348 {
349   PetscFunctionBegin;
350   if (reduce) { /* data given for the whole matrix */
351     PetscInt i, m = 0, p = 0;
352     for (i = 0; i < mpardiso->nrhs; i++) {
353       PetscInt j;
354       for (j = 0; j < mpardiso->schur_size; j++) schur[p + j] = whole[m + mpardiso->schur_idxs[j]];
355       m += mpardiso->n;
356       p += mpardiso->schur_size;
357     }
358   } else { /* from Schur to whole */
359     PetscInt i, m = 0, p = 0;
360     for (i = 0; i < mpardiso->nrhs; i++) {
361       PetscInt j;
362       for (j = 0; j < mpardiso->schur_size; j++) whole[m + mpardiso->schur_idxs[j]] = schur[p + j];
363       m += mpardiso->n;
364       p += mpardiso->schur_size;
365     }
366   }
367   PetscFunctionReturn(PETSC_SUCCESS);
368 }
369 
370 static PetscErrorCode MatSolve_MKL_PARDISO(Mat A, Vec b, Vec x)
371 {
372   Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
373   PetscScalar       *xarray;
374   const PetscScalar *barray;
375 
376   PetscFunctionBegin;
377   mat_mkl_pardiso->nrhs = 1;
378   PetscCall(VecGetArrayWrite(x, &xarray));
379   PetscCall(VecGetArrayRead(b, &barray));
380 
381   if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
382   else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;
383 
384   if (barray == xarray) { /* if the two vectors share the same memory */
385     PetscScalar *work;
386     if (!mat_mkl_pardiso->schur_work) {
387       PetscCall(PetscMalloc1(mat_mkl_pardiso->n, &work));
388     } else {
389       work = mat_mkl_pardiso->schur_work;
390     }
391     mat_mkl_pardiso->iparm[6 - 1] = 1;
392     MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, NULL, &mat_mkl_pardiso->nrhs,
393                 mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)work, &mat_mkl_pardiso->err);
394     if (!mat_mkl_pardiso->schur_work) PetscCall(PetscFree(work));
395   } else {
396     mat_mkl_pardiso->iparm[6 - 1] = 0;
397     MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
398                 &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err);
399   }
400   PetscCall(VecRestoreArrayRead(b, &barray));
401 
402   PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
403 
404   if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
405     if (!mat_mkl_pardiso->solve_interior) {
406       PetscInt shift = mat_mkl_pardiso->schur_size;
407 
408       PetscCall(MatFactorFactorizeSchurComplement(A));
409       /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
410       if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;
411 
412       /* solve Schur complement */
413       PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work, PETSC_TRUE));
414       PetscCall(MatMKLPardisoSolveSchur_Private(A, mat_mkl_pardiso->schur_work, mat_mkl_pardiso->schur_work + shift));
415       PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work + shift, PETSC_FALSE));
416     } else { /* if we are solving for the interior problem, any value in barray[schur] forward-substituted to xarray[schur] will be neglected */
417       PetscInt i;
418       for (i = 0; i < mat_mkl_pardiso->schur_size; i++) xarray[mat_mkl_pardiso->schur_idxs[i]] = 0.;
419     }
420 
421     /* expansion phase */
422     mat_mkl_pardiso->iparm[6 - 1] = 1;
423     mat_mkl_pardiso->phase        = JOB_SOLVE_BACKWARD_SUBSTITUTION;
424     MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
425                 &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
426                 &mat_mkl_pardiso->err);
427 
428     PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
429     mat_mkl_pardiso->iparm[6 - 1] = 0;
430   }
431   PetscCall(VecRestoreArrayWrite(x, &xarray));
432   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
433   PetscFunctionReturn(PETSC_SUCCESS);
434 }
435 
436 static PetscErrorCode MatForwardSolve_MKL_PARDISO(Mat A, Vec b, Vec x)
437 {
438   Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
439   PetscScalar       *xarray;
440   const PetscScalar *barray;
441 
442   PetscFunctionBegin;
443   PetscCheck(!mat_mkl_pardiso->schur, PETSC_COMM_SELF, PETSC_ERR_SUP, "Forward substitution not supported with Schur complement");
444 
445   mat_mkl_pardiso->nrhs = 1;
446   PetscCall(VecGetArrayWrite(x, &xarray));
447   PetscCall(VecGetArrayRead(b, &barray));
448 
449   mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;
450 
451   MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
452               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err);
453 
454   PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
455 
456   PetscCall(VecRestoreArrayRead(b, &barray));
457   PetscCall(VecRestoreArrayWrite(x, &xarray));
458   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
459   PetscFunctionReturn(PETSC_SUCCESS);
460 }
461 
462 static PetscErrorCode MatBackwardSolve_MKL_PARDISO(Mat A, Vec b, Vec x)
463 {
464   Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
465   PetscScalar       *xarray;
466   const PetscScalar *barray;
467 
468   PetscFunctionBegin;
469   PetscCheck(!mat_mkl_pardiso->schur, PETSC_COMM_SELF, PETSC_ERR_SUP, "Backward substitution not supported with Schur complement");
470 
471   mat_mkl_pardiso->nrhs = 1;
472   PetscCall(VecGetArrayWrite(x, &xarray));
473   PetscCall(VecGetArrayRead(b, &barray));
474 
475   mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
476 
477   MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
478               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err);
479 
480   PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
481 
482   PetscCall(VecRestoreArrayRead(b, &barray));
483   PetscCall(VecRestoreArrayWrite(x, &xarray));
484   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
485   PetscFunctionReturn(PETSC_SUCCESS);
486 }
487 
488 static PetscErrorCode MatSolveTranspose_MKL_PARDISO(Mat A, Vec b, Vec x)
489 {
490   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
491   PetscInt         oiparm12;
492 
493   PetscFunctionBegin;
494   oiparm12                       = mat_mkl_pardiso->iparm[12 - 1];
495   mat_mkl_pardiso->iparm[12 - 1] = 2;
496   PetscCall(MatSolve_MKL_PARDISO(A, b, x));
497   mat_mkl_pardiso->iparm[12 - 1] = oiparm12;
498   PetscFunctionReturn(PETSC_SUCCESS);
499 }
500 
501 static PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A, Mat B, Mat X)
502 {
503   Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
504   const PetscScalar *barray;
505   PetscScalar       *xarray;
506   PetscBool          flg;
507 
508   PetscFunctionBegin;
509   PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATSEQDENSE, &flg));
510   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATSEQDENSE matrix");
511   if (X != B) {
512     PetscCall(PetscObjectBaseTypeCompare((PetscObject)X, MATSEQDENSE, &flg));
513     PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATSEQDENSE matrix");
514   }
515 
516   PetscCall(MatGetSize(B, NULL, (PetscInt *)&mat_mkl_pardiso->nrhs));
517 
518   if (mat_mkl_pardiso->nrhs > 0) {
519     PetscCall(MatDenseGetArrayRead(B, &barray));
520     PetscCall(MatDenseGetArrayWrite(X, &xarray));
521 
522     PetscCheck(barray != xarray, PETSC_COMM_SELF, PETSC_ERR_SUP, "B and X cannot share the same memory location");
523     if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
524     else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;
525 
526     MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
527                 &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err);
528     PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
529 
530     PetscCall(MatDenseRestoreArrayRead(B, &barray));
531     if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
532       PetscScalar *o_schur_work = NULL;
533 
534       /* solve Schur complement */
535       if (!mat_mkl_pardiso->solve_interior) {
536         PetscInt shift = mat_mkl_pardiso->schur_size * mat_mkl_pardiso->nrhs, scale;
537         PetscInt mem   = mat_mkl_pardiso->n * mat_mkl_pardiso->nrhs;
538 
539         PetscCall(MatFactorFactorizeSchurComplement(A));
540         /* allocate extra memory if it is needed */
541         scale = 1;
542         if (A->schur_status == MAT_FACTOR_SCHUR_INVERTED) scale = 2;
543         mem *= scale;
544         if (mem > mat_mkl_pardiso->schur_work_size) {
545           o_schur_work = mat_mkl_pardiso->schur_work;
546           PetscCall(PetscMalloc1(mem, &mat_mkl_pardiso->schur_work));
547         }
548         /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
549         if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;
550         PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work, PETSC_TRUE));
551         PetscCall(MatMKLPardisoSolveSchur_Private(A, mat_mkl_pardiso->schur_work, mat_mkl_pardiso->schur_work + shift));
552         PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work + shift, PETSC_FALSE));
553       } else { /* if we are solving for the interior problem, any value in barray[schur,n] forward-substituted to xarray[schur,n] will be neglected */
554         PetscInt i, n, m = 0;
555         for (n = 0; n < mat_mkl_pardiso->nrhs; n++) {
556           for (i = 0; i < mat_mkl_pardiso->schur_size; i++) xarray[mat_mkl_pardiso->schur_idxs[i] + m] = 0.;
557           m += mat_mkl_pardiso->n;
558         }
559       }
560 
561       /* expansion phase */
562       mat_mkl_pardiso->iparm[6 - 1] = 1;
563       mat_mkl_pardiso->phase        = JOB_SOLVE_BACKWARD_SUBSTITUTION;
564       MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
565                   &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
566                   &mat_mkl_pardiso->err);
567       if (o_schur_work) { /* restore original Schur_work (minimal size) */
568         PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
569         mat_mkl_pardiso->schur_work = o_schur_work;
570       }
571       PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
572       mat_mkl_pardiso->iparm[6 - 1] = 0;
573     }
574     PetscCall(MatDenseRestoreArrayWrite(X, &xarray));
575   }
576   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
577   PetscFunctionReturn(PETSC_SUCCESS);
578 }
579 
580 static PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F, Mat A, const MatFactorInfo *info)
581 {
582   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
583 
584   PetscFunctionBegin;
585   mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
586   PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_REUSE_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a));
587 
588   mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION;
589   MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
590               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, (void *)mat_mkl_pardiso->schur, &mat_mkl_pardiso->err);
591   PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
592 
593   /* report flops */
594   if (mat_mkl_pardiso->iparm[18] > 0) PetscCall(PetscLogFlops(PetscPowRealInt(10., 6) * mat_mkl_pardiso->iparm[18]));
595 
596   if (F->schur) { /* schur output from pardiso is in row major format */
597 #if defined(PETSC_HAVE_CUDA)
598     F->schur->offloadmask = PETSC_OFFLOAD_CPU;
599 #endif
600     PetscCall(MatFactorRestoreSchurComplement(F, NULL, MAT_FACTOR_SCHUR_UNFACTORED));
601     PetscCall(MatTranspose(F->schur, MAT_INPLACE_MATRIX, &F->schur));
602   }
603   mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
604   mat_mkl_pardiso->CleanUp  = PETSC_TRUE;
605   PetscFunctionReturn(PETSC_SUCCESS);
606 }
607 
608 static PetscErrorCode MatSetFromOptions_MKL_PARDISO(Mat F, Mat A)
609 {
610   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
611   PetscInt         icntl, bs, threads = 1;
612   PetscBool        flg;
613 
614   PetscFunctionBegin;
615   PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "MKL_PARDISO Options", "Mat");
616 
617   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_65", "Suggested number of threads to use within MKL PARDISO", "None", threads, &threads, &flg));
618   if (flg) PetscSetMKL_PARDISOThreads((int)threads);
619 
620   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_66", "Maximum number of factors with identical sparsity structure that must be kept in memory at the same time", "None", mat_mkl_pardiso->maxfct, &icntl, &flg));
621   if (flg) mat_mkl_pardiso->maxfct = icntl;
622 
623   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_67", "Indicates the actual matrix for the solution phase", "None", mat_mkl_pardiso->mnum, &icntl, &flg));
624   if (flg) mat_mkl_pardiso->mnum = icntl;
625 
626   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_68", "Message level information", "None", mat_mkl_pardiso->msglvl, &icntl, &flg));
627   if (flg) mat_mkl_pardiso->msglvl = icntl;
628 
629   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_69", "Defines the matrix type", "None", mat_mkl_pardiso->mtype, &icntl, &flg));
630   if (flg) {
631     void *pt[IPARM_SIZE];
632     mat_mkl_pardiso->mtype = icntl;
633     icntl                  = mat_mkl_pardiso->iparm[34];
634     bs                     = mat_mkl_pardiso->iparm[36];
635     MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
636 #if defined(PETSC_USE_REAL_SINGLE)
637     mat_mkl_pardiso->iparm[27] = 1;
638 #else
639     mat_mkl_pardiso->iparm[27] = 0;
640 #endif
641     mat_mkl_pardiso->iparm[34] = icntl;
642     mat_mkl_pardiso->iparm[36] = bs;
643   }
644 
645   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_1", "Use default values (if 0)", "None", mat_mkl_pardiso->iparm[0], &icntl, &flg));
646   if (flg) mat_mkl_pardiso->iparm[0] = icntl;
647 
648   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_2", "Fill-in reducing ordering for the input matrix", "None", mat_mkl_pardiso->iparm[1], &icntl, &flg));
649   if (flg) mat_mkl_pardiso->iparm[1] = icntl;
650 
651   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_4", "Preconditioned CGS/CG", "None", mat_mkl_pardiso->iparm[3], &icntl, &flg));
652   if (flg) mat_mkl_pardiso->iparm[3] = icntl;
653 
654   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_5", "User permutation", "None", mat_mkl_pardiso->iparm[4], &icntl, &flg));
655   if (flg) mat_mkl_pardiso->iparm[4] = icntl;
656 
657   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_6", "Write solution on x", "None", mat_mkl_pardiso->iparm[5], &icntl, &flg));
658   if (flg) mat_mkl_pardiso->iparm[5] = icntl;
659 
660   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_8", "Iterative refinement step", "None", mat_mkl_pardiso->iparm[7], &icntl, &flg));
661   if (flg) mat_mkl_pardiso->iparm[7] = icntl;
662 
663   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_10", "Pivoting perturbation", "None", mat_mkl_pardiso->iparm[9], &icntl, &flg));
664   if (flg) mat_mkl_pardiso->iparm[9] = icntl;
665 
666   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_11", "Scaling vectors", "None", mat_mkl_pardiso->iparm[10], &icntl, &flg));
667   if (flg) mat_mkl_pardiso->iparm[10] = icntl;
668 
669   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_12", "Solve with transposed or conjugate transposed matrix A", "None", mat_mkl_pardiso->iparm[11], &icntl, &flg));
670   if (flg) mat_mkl_pardiso->iparm[11] = icntl;
671 
672   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_13", "Improved accuracy using (non-) symmetric weighted matching", "None", mat_mkl_pardiso->iparm[12], &icntl, &flg));
673   if (flg) mat_mkl_pardiso->iparm[12] = icntl;
674 
675   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_18", "Numbers of non-zero elements", "None", mat_mkl_pardiso->iparm[17], &icntl, &flg));
676   if (flg) mat_mkl_pardiso->iparm[17] = icntl;
677 
678   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_19", "Report number of floating point operations (0 to disable)", "None", mat_mkl_pardiso->iparm[18], &icntl, &flg));
679   if (flg) mat_mkl_pardiso->iparm[18] = icntl;
680 
681   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_21", "Pivoting for symmetric indefinite matrices", "None", mat_mkl_pardiso->iparm[20], &icntl, &flg));
682   if (flg) mat_mkl_pardiso->iparm[20] = icntl;
683 
684   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_24", "Parallel factorization control", "None", mat_mkl_pardiso->iparm[23], &icntl, &flg));
685   if (flg) mat_mkl_pardiso->iparm[23] = icntl;
686 
687   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_25", "Parallel forward/backward solve control", "None", mat_mkl_pardiso->iparm[24], &icntl, &flg));
688   if (flg) mat_mkl_pardiso->iparm[24] = icntl;
689 
690   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_27", "Matrix checker", "None", mat_mkl_pardiso->iparm[26], &icntl, &flg));
691   if (flg) mat_mkl_pardiso->iparm[26] = icntl;
692 
693   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_31", "Partial solve and computing selected components of the solution vectors", "None", mat_mkl_pardiso->iparm[30], &icntl, &flg));
694   if (flg) mat_mkl_pardiso->iparm[30] = icntl;
695 
696   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_34", "Optimal number of threads for conditional numerical reproducibility (CNR) mode", "None", mat_mkl_pardiso->iparm[33], &icntl, &flg));
697   if (flg) mat_mkl_pardiso->iparm[33] = icntl;
698 
699   PetscCall(PetscOptionsInt("-mat_mkl_pardiso_60", "Intel MKL PARDISO mode", "None", mat_mkl_pardiso->iparm[59], &icntl, &flg));
700   if (flg) mat_mkl_pardiso->iparm[59] = icntl;
701   PetscOptionsEnd();
702   PetscFunctionReturn(PETSC_SUCCESS);
703 }
704 
705 static PetscErrorCode MatFactorMKL_PARDISOInitialize_Private(Mat A, MatFactorType ftype, Mat_MKL_PARDISO *mat_mkl_pardiso)
706 {
707   PetscInt  i, bs;
708   PetscBool match;
709 
710   PetscFunctionBegin;
711   for (i = 0; i < IPARM_SIZE; i++) mat_mkl_pardiso->iparm[i] = 0;
712   for (i = 0; i < IPARM_SIZE; i++) mat_mkl_pardiso->pt[i] = 0;
713 #if defined(PETSC_USE_REAL_SINGLE)
714   mat_mkl_pardiso->iparm[27] = 1;
715 #else
716   mat_mkl_pardiso->iparm[27] = 0;
717 #endif
718   /* Default options for both symmetric and unsymmetric matrices */
719   mat_mkl_pardiso->iparm[0]  = 1;  /* Solver default parameters are overridden by the values provided in iparm */
720   mat_mkl_pardiso->iparm[1]  = 2;  /* Metis reordering */
721   mat_mkl_pardiso->iparm[5]  = 0;  /* Write solution into x */
722   mat_mkl_pardiso->iparm[7]  = 0;  /* Max number of iterative refinement steps */
723   mat_mkl_pardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
724   mat_mkl_pardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
725 #if 0
726   mat_mkl_pardiso->iparm[23] =  1; /* Parallel factorization control*/
727 #endif
728   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &match, MATSEQBAIJ, MATSEQSBAIJ, ""));
729   PetscCall(MatGetBlockSize(A, &bs));
730   if (!match || bs == 1) {
731     mat_mkl_pardiso->iparm[34] = 1; /* use C-style (0-based) indexing for ia and ja arrays */
732     mat_mkl_pardiso->n         = A->rmap->N;
733   } else {
734     mat_mkl_pardiso->iparm[34] = 0; /* use Fortran-style (1-based) indexing for ia and ja arrays */
735     mat_mkl_pardiso->iparm[36] = bs;
736     mat_mkl_pardiso->n         = A->rmap->N / bs;
737   }
738   mat_mkl_pardiso->iparm[39] = 0; /* Input: matrix/rhs/solution stored on rank-0 */
739 
740   mat_mkl_pardiso->CleanUp = PETSC_FALSE;
741   mat_mkl_pardiso->maxfct  = 1; /* Maximum number of numerical factorizations. */
742   mat_mkl_pardiso->mnum    = 1; /* Which factorization to use. */
743   mat_mkl_pardiso->msglvl  = 0; /* 0: do not print; 1: print statistical information */
744   mat_mkl_pardiso->phase   = -1;
745   mat_mkl_pardiso->err     = 0;
746 
747   mat_mkl_pardiso->nrhs  = 1;
748   mat_mkl_pardiso->err   = 0;
749   mat_mkl_pardiso->phase = -1;
750 
751   if (ftype == MAT_FACTOR_LU) {
752     mat_mkl_pardiso->iparm[9]  = 13; /* Perturb the pivot elements with 1E-13 */
753     mat_mkl_pardiso->iparm[10] = 1;  /* Use nonsymmetric permutation and scaling MPS */
754     mat_mkl_pardiso->iparm[12] = 1;  /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
755   } else {
756     mat_mkl_pardiso->iparm[9]  = 8; /* Perturb the pivot elements with 1E-8 */
757     mat_mkl_pardiso->iparm[10] = 0; /* Do not use nonsymmetric permutation and scaling MPS */
758     mat_mkl_pardiso->iparm[12] = 1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
759 #if defined(PETSC_USE_DEBUG)
760     mat_mkl_pardiso->iparm[26] = 1; /* Matrix checker */
761 #endif
762   }
763   PetscCall(PetscCalloc1(A->rmap->N, &mat_mkl_pardiso->perm));
764   mat_mkl_pardiso->schur_size = 0;
765   PetscFunctionReturn(PETSC_SUCCESS);
766 }
767 
768 static PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F, Mat A, const MatFactorInfo *info)
769 {
770   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
771 
772   PetscFunctionBegin;
773   mat_mkl_pardiso->matstruc = DIFFERENT_NONZERO_PATTERN;
774   PetscCall(MatSetFromOptions_MKL_PARDISO(F, A));
775   /* throw away any previously computed structure */
776   if (mat_mkl_pardiso->freeaij) {
777     PetscCall(PetscFree2(mat_mkl_pardiso->ia, mat_mkl_pardiso->ja));
778     if (mat_mkl_pardiso->iparm[34] == 1) PetscCall(PetscFree(mat_mkl_pardiso->a));
779   }
780   PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_INITIAL_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a));
781   if (mat_mkl_pardiso->iparm[34] == 1) mat_mkl_pardiso->n = A->rmap->N;
782   else mat_mkl_pardiso->n = A->rmap->N / A->rmap->bs;
783 
784   mat_mkl_pardiso->phase = JOB_ANALYSIS;
785 
786   /* reset flops counting if requested */
787   if (mat_mkl_pardiso->iparm[18]) mat_mkl_pardiso->iparm[18] = -1;
788 
789   MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
790               &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err);
791   PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
792 
793   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
794 
795   if (F->factortype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_MKL_PARDISO;
796   else F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_PARDISO;
797 
798   F->ops->solve          = MatSolve_MKL_PARDISO;
799   F->ops->solvetranspose = MatSolveTranspose_MKL_PARDISO;
800   F->ops->matsolve       = MatMatSolve_MKL_PARDISO;
801   if (F->factortype == MAT_FACTOR_LU || (!PetscDefined(USE_COMPLEX) && F->factortype == MAT_FACTOR_CHOLESKY && A->spd == PETSC_BOOL3_TRUE)) {
802     F->ops->backwardsolve = MatBackwardSolve_MKL_PARDISO;
803     F->ops->forwardsolve  = MatForwardSolve_MKL_PARDISO;
804   }
805   PetscFunctionReturn(PETSC_SUCCESS);
806 }
807 
808 static PetscErrorCode MatLUFactorSymbolic_AIJMKL_PARDISO(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
809 {
810   PetscFunctionBegin;
811   PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
812   PetscFunctionReturn(PETSC_SUCCESS);
813 }
814 
815 #if !defined(PETSC_USE_COMPLEX)
816 static PetscErrorCode MatGetInertia_MKL_PARDISO(Mat F, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
817 {
818   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
819 
820   PetscFunctionBegin;
821   if (nneg) *nneg = mat_mkl_pardiso->iparm[22];
822   if (npos) *npos = mat_mkl_pardiso->iparm[21];
823   if (nzero) *nzero = F->rmap->N - (mat_mkl_pardiso->iparm[22] + mat_mkl_pardiso->iparm[21]);
824   PetscFunctionReturn(PETSC_SUCCESS);
825 }
826 #endif
827 
828 static PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_PARDISO(Mat F, Mat A, IS r, const MatFactorInfo *info)
829 {
830   PetscFunctionBegin;
831   PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
832   F->ops->getinertia = NULL;
833 #if !defined(PETSC_USE_COMPLEX)
834   F->ops->getinertia = MatGetInertia_MKL_PARDISO;
835 #endif
836   PetscFunctionReturn(PETSC_SUCCESS);
837 }
838 
839 static PetscErrorCode MatView_MKL_PARDISO(Mat A, PetscViewer viewer)
840 {
841   PetscBool         iascii;
842   PetscViewerFormat format;
843   Mat_MKL_PARDISO  *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
844   PetscInt          i;
845 
846   PetscFunctionBegin;
847   if (A->ops->solve != MatSolve_MKL_PARDISO) PetscFunctionReturn(PETSC_SUCCESS);
848 
849   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
850   if (iascii) {
851     PetscCall(PetscViewerGetFormat(viewer, &format));
852     if (format == PETSC_VIEWER_ASCII_INFO) {
853       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO run parameters:\n"));
854       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO phase:             %d \n", mat_mkl_pardiso->phase));
855       for (i = 1; i <= 64; i++) PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO iparm[%d]:     %d \n", i, mat_mkl_pardiso->iparm[i - 1]));
856       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO maxfct:     %d \n", mat_mkl_pardiso->maxfct));
857       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO mnum:     %d \n", mat_mkl_pardiso->mnum));
858       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO mtype:     %d \n", mat_mkl_pardiso->mtype));
859       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO n:     %d \n", mat_mkl_pardiso->n));
860       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO nrhs:     %d \n", mat_mkl_pardiso->nrhs));
861       PetscCall(PetscViewerASCIIPrintf(viewer, "MKL PARDISO msglvl:     %d \n", mat_mkl_pardiso->msglvl));
862     }
863   }
864   PetscFunctionReturn(PETSC_SUCCESS);
865 }
866 
867 static PetscErrorCode MatGetInfo_MKL_PARDISO(Mat A, MatInfoType flag, MatInfo *info)
868 {
869   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
870 
871   PetscFunctionBegin;
872   info->block_size        = 1.0;
873   info->nz_used           = mat_mkl_pardiso->iparm[17];
874   info->nz_allocated      = mat_mkl_pardiso->iparm[17];
875   info->nz_unneeded       = 0.0;
876   info->assemblies        = 0.0;
877   info->mallocs           = 0.0;
878   info->memory            = 0.0;
879   info->fill_ratio_given  = 0;
880   info->fill_ratio_needed = 0;
881   info->factor_mallocs    = 0;
882   PetscFunctionReturn(PETSC_SUCCESS);
883 }
884 
885 static PetscErrorCode MatMkl_PardisoSetCntl_MKL_PARDISO(Mat F, PetscInt icntl, PetscInt ival)
886 {
887   PetscInt         backup, bs;
888   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
889 
890   PetscFunctionBegin;
891   if (icntl <= 64) {
892     mat_mkl_pardiso->iparm[icntl - 1] = ival;
893   } else {
894     if (icntl == 65) PetscSetMKL_PARDISOThreads(ival);
895     else if (icntl == 66) mat_mkl_pardiso->maxfct = ival;
896     else if (icntl == 67) mat_mkl_pardiso->mnum = ival;
897     else if (icntl == 68) mat_mkl_pardiso->msglvl = ival;
898     else if (icntl == 69) {
899       void *pt[IPARM_SIZE];
900       backup                 = mat_mkl_pardiso->iparm[34];
901       bs                     = mat_mkl_pardiso->iparm[36];
902       mat_mkl_pardiso->mtype = ival;
903       MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
904 #if defined(PETSC_USE_REAL_SINGLE)
905       mat_mkl_pardiso->iparm[27] = 1;
906 #else
907       mat_mkl_pardiso->iparm[27] = 0;
908 #endif
909       mat_mkl_pardiso->iparm[34] = backup;
910       mat_mkl_pardiso->iparm[36] = bs;
911     } else if (icntl == 70) mat_mkl_pardiso->solve_interior = (PetscBool) !!ival;
912   }
913   PetscFunctionReturn(PETSC_SUCCESS);
914 }
915 
916 /*@
917   MatMkl_PardisoSetCntl - Set MKL PARDISO <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2023-2/onemkl-pardiso-parallel-direct-sparse-solver-iface.html> parameters
918 
919   Logically Collective
920 
921   Input Parameters:
922 + F     - the factored matrix obtained by calling `MatGetFactor()`
923 . icntl - index of MKL PARDISO parameter
924 - ival  - value of MKL PARDISO parameter
925 
926   Options Database Key:
927 . -mat_mkl_pardiso_<icntl> <ival> - change the option numbered icntl to the value ival
928 
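927   Example Usage:
928   A minimal sketch (assumes `A` is an assembled `MATSEQAIJ` matrix and `F` is declared as `Mat`); icntl values 1-64 set the corresponding iparm entry, 65-70 set the auxiliary controls handled above
.vb
  PetscCall(MatGetFactor(A, MATSOLVERMKL_PARDISO, MAT_FACTOR_LU, &F));
  PetscCall(MatMkl_PardisoSetCntl(F, 68, 1)); /* msglvl: print statistical information */
  PetscCall(MatMkl_PardisoSetCntl(F, 8, 2));  /* iparm(8): up to two iterative refinement steps */
.ve
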
929   Level: beginner
930 
931 .seealso: [](ch_matrices), `Mat`, `MATSOLVERMKL_PARDISO`, `MatGetFactor()`
932 @*/
933 PetscErrorCode MatMkl_PardisoSetCntl(Mat F, PetscInt icntl, PetscInt ival)
934 {
935   PetscFunctionBegin;
936   PetscTryMethod(F, "MatMkl_PardisoSetCntl_C", (Mat, PetscInt, PetscInt), (F, icntl, ival));
937   PetscFunctionReturn(PETSC_SUCCESS);
938 }
939 
940 /*MC
941   MATSOLVERMKL_PARDISO -  A matrix type providing direct LU and Cholesky solvers for
942   `MATSEQAIJ`, `MATSEQBAIJ`, and `MATSEQSBAIJ` matrices via the external package MKL PARDISO
943   <https://www.intel.com/content/www/us/en/docs/onemkl/developer-reference-c/2024-0/onemkl-pardiso-parallel-direct-sparse-solver-iface.html>.
944 
945   Use `-pc_type lu` `-pc_factor_mat_solver_type mkl_pardiso` to use this direct solver
946 
947   Options Database Keys:
948 + -mat_mkl_pardiso_65 - Suggested number of threads to use within MKL PARDISO
949 . -mat_mkl_pardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
950 . -mat_mkl_pardiso_67 - Indicates the actual matrix for the solution phase
951 . -mat_mkl_pardiso_68 - Message level information, use 1 to get detailed information on the solver options
952 . -mat_mkl_pardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, the iparm parameters are reset to the defaults for that matrix type
953 . -mat_mkl_pardiso_1  - Use default values
954 . -mat_mkl_pardiso_2  - Fill-in reducing ordering for the input matrix
955 . -mat_mkl_pardiso_4  - Preconditioned CGS/CG
956 . -mat_mkl_pardiso_5  - User permutation
957 . -mat_mkl_pardiso_6  - Write solution on x
958 . -mat_mkl_pardiso_8  - Iterative refinement step
959 . -mat_mkl_pardiso_10 - Pivoting perturbation
960 . -mat_mkl_pardiso_11 - Scaling vectors
961 . -mat_mkl_pardiso_12 - Solve with transposed or conjugate transposed matrix A
962 . -mat_mkl_pardiso_13 - Improved accuracy using (non-) symmetric weighted matching
963 . -mat_mkl_pardiso_18 - Numbers of non-zero elements
964 . -mat_mkl_pardiso_19 - Report number of floating point operations
965 . -mat_mkl_pardiso_21 - Pivoting for symmetric indefinite matrices
966 . -mat_mkl_pardiso_24 - Parallel factorization control
967 . -mat_mkl_pardiso_25 - Parallel forward/backward solve control
968 . -mat_mkl_pardiso_27 - Matrix checker
969 . -mat_mkl_pardiso_31 - Partial solve and computing selected components of the solution vectors
970 . -mat_mkl_pardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
971 - -mat_mkl_pardiso_60 - Intel MKL PARDISO mode
972 
973   Level: beginner
974 
975   Notes:
976   Use `-mat_mkl_pardiso_68 1` to display the number of threads the solver is using. MKL does not provide a way to directly access this
977   information.
978 
979   For more information on the options, check the MKL PARDISO manual
980 
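  Example Usage:
  A minimal sketch of selecting this solver from code (assumes `A` is an assembled `MATSEQAIJ` matrix and `b`, `x` are compatible vectors):
.vb
  KSP ksp;
  PC  pc;
  PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCLU));
  PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERMKL_PARDISO));
  PetscCall(KSPSetFromOptions(ksp));
  /* the -mat_mkl_pardiso_* options listed above are applied when the factorization is built inside KSPSolve() */
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPDestroy(&ksp));
.ve
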
981 .seealso: [](ch_matrices), `Mat`, `MATSEQAIJ`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatMkl_PardisoSetCntl()`, `MATSOLVERMKL_CPARDISO`
982 M*/
983 static PetscErrorCode MatFactorGetSolverType_mkl_pardiso(Mat A, MatSolverType *type)
984 {
985   PetscFunctionBegin;
986   *type = MATSOLVERMKL_PARDISO;
987   PetscFunctionReturn(PETSC_SUCCESS);
988 }
989 
990 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A, MatFactorType ftype, Mat *F)
991 {
992   Mat              B;
993   Mat_MKL_PARDISO *mat_mkl_pardiso;
994   PetscBool        isSeqAIJ, isSeqBAIJ, isSeqSBAIJ;
995 
996   PetscFunctionBegin;
997   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
998   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQBAIJ, &isSeqBAIJ));
999   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
1000   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1001   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
1002   PetscCall(PetscStrallocpy("mkl_pardiso", &((PetscObject)B)->type_name));
1003   PetscCall(MatSetUp(B));
1004 
1005   PetscCall(PetscNew(&mat_mkl_pardiso));
1006   B->data = mat_mkl_pardiso;
1007 
1008   PetscCall(MatFactorMKL_PARDISOInitialize_Private(A, ftype, mat_mkl_pardiso));
1009   if (ftype == MAT_FACTOR_LU) {
1010     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_PARDISO;
1011     B->factortype            = MAT_FACTOR_LU;
1012     mat_mkl_pardiso->needsym = PETSC_FALSE;
1013     if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
1014     else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
1015     else {
1016       PetscCheck(!isSeqSBAIJ, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for MKL PARDISO LU factor with SEQSBAIJ format! Use MAT_FACTOR_CHOLESKY instead");
1017       SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for MKL PARDISO LU with %s format", ((PetscObject)A)->type_name);
1018     }
1019 #if defined(PETSC_USE_COMPLEX)
1020     mat_mkl_pardiso->mtype = 13;
1021 #else
1022     mat_mkl_pardiso->mtype = 11;
1023 #endif
1024   } else {
1025     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_PARDISO;
1026     B->factortype                  = MAT_FACTOR_CHOLESKY;
1027     if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
1028     else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
1029     else if (isSeqSBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqsbaij;
1030     else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO CHOLESKY with %s format", ((PetscObject)A)->type_name);
1031 
1032     mat_mkl_pardiso->needsym = PETSC_TRUE;
1033 #if !defined(PETSC_USE_COMPLEX)
1034     if (A->spd == PETSC_BOOL3_TRUE) mat_mkl_pardiso->mtype = 2;
1035     else mat_mkl_pardiso->mtype = -2;
1036 #else
1037     mat_mkl_pardiso->mtype = 6;
1038     PetscCheck(A->hermitian != PETSC_BOOL3_TRUE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for MKL PARDISO CHOLESKY with Hermitian matrices! Use MAT_FACTOR_LU instead");
1039 #endif
1040   }
1041   B->ops->destroy = MatDestroy_MKL_PARDISO;
1042   B->ops->view    = MatView_MKL_PARDISO;
1043   B->ops->getinfo = MatGetInfo_MKL_PARDISO;
1044   B->factortype   = ftype;
1045   B->assembled    = PETSC_TRUE;
1046 
1047   PetscCall(PetscFree(B->solvertype));
1048   PetscCall(PetscStrallocpy(MATSOLVERMKL_PARDISO, &B->solvertype));
1049 
1050   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_mkl_pardiso));
1051   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorSetSchurIS_C", MatFactorSetSchurIS_MKL_PARDISO));
1052   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMkl_PardisoSetCntl_C", MatMkl_PardisoSetCntl_MKL_PARDISO));
1053 
1054   *F = B;
1055   PetscFunctionReturn(PETSC_SUCCESS);
1056 }
1057 
1058 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void)
1059 {
1060   PetscFunctionBegin;
1061   PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso));
1062   PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_mkl_pardiso));
1063   PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQBAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso));
1064   PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_mkl_pardiso));
1065   PetscFunctionReturn(PETSC_SUCCESS);
1066 }
1067