#include <../src/mat/impls/aij/seq/aij.h>        /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/dense/seq/dense.h>

#if defined(PETSC_HAVE_MKL_INTEL_ILP64)
#define MKL_ILP64
#endif
#include <mkl_pardiso.h>

PETSC_EXTERN void PetscSetMKL_PARDISOThreads(int);

/*
 *  Possible mkl_pardiso phases that control the execution of the solver.
 *  For more information check the mkl_pardiso manual.
 */
#define JOB_ANALYSIS 11
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION 12
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
#define JOB_NUMERICAL_FACTORIZATION 22
#define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 23
#define JOB_SOLVE_ITERATIVE_REFINEMENT 33
#define JOB_SOLVE_FORWARD_SUBSTITUTION 331
#define JOB_SOLVE_DIAGONAL_SUBSTITUTION 332
#define JOB_SOLVE_BACKWARD_SUBSTITUTION 333
#define JOB_RELEASE_OF_LU_MEMORY 0
#define JOB_RELEASE_OF_ALL_MEMORY -1
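
/*
 *  As described in the MKL manual, the two digits of a phase value encode its starting
 *  and ending step (1 = analysis, 2 = numerical factorization, 3 = solve), so e.g. 13
 *  runs analysis, factorization and solve in one call; the 33x values request partial
 *  (forward/diagonal/backward) solves, while 0 and -1 release the factor and all
 *  internal memory, respectively.
 */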

#define IPARM_SIZE 64

#if defined(PETSC_USE_64BIT_INDICES)
 #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
  #define INT_TYPE long long int
  #define MKL_PARDISO pardiso
  #define MKL_PARDISO_INIT pardisoinit
 #else
  /* this is the case where the MKL BLAS/LAPACK are 32 bit integers but the 64 bit integer
     version of the Pardiso code is used; hence the need for the 64 below */
  #define INT_TYPE long long int
  #define MKL_PARDISO pardiso_64
  #define MKL_PARDISO_INIT pardiso_64init
void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm [])
{
  int iparm_copy[IPARM_SIZE], mtype_copy, i;

  mtype_copy = *mtype;
  pardisoinit(pt, &mtype_copy, iparm_copy);
  for (i=0; i<IPARM_SIZE; i++) iparm[i] = iparm_copy[i];
}
 #endif
#else
 #define INT_TYPE int
 #define MKL_PARDISO pardiso
 #define MKL_PARDISO_INIT pardisoinit
#endif
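
/*
 *  Summary of the combinations above: with ILP64 MKL, or with 32 bit PetscInt, the plain
 *  pardiso()/pardisoinit() entry points already match the integer width; with 64 bit
 *  PetscInt on top of LP64 MKL the pardiso_64() entry point is used and the
 *  pardiso_64init() shim above widens the int iparm array filled by pardisoinit().
 */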

/*
 *  Internal data structure.
 *  For more information check the mkl_pardiso manual.
 */
typedef struct {

  /* Configuration vector */
  INT_TYPE     iparm[IPARM_SIZE];

  /*
   * Internal mkl_pardiso memory location.
   * After the first call to mkl_pardiso do not modify pt, as that could cause a serious memory leak.
   */
  void         *pt[IPARM_SIZE];

  /* Basic mkl_pardiso info */
  INT_TYPE     phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;

  /* Matrix structure */
  void         *a;
  INT_TYPE     *ia, *ja;

  /* Number of non-zero elements */
  INT_TYPE     nz;

  /* Row permutation vector */
  INT_TYPE     *perm;

  /* Defines whether the matrix preserves its sparse structure. */
  MatStructure matstruc;

  PetscBool    needsym;
  PetscBool    freeaij;

  /* Schur complement */
  PetscScalar  *schur;
  PetscInt     schur_size;
  PetscInt     *schur_idxs;
  PetscScalar  *schur_work;
  PetscBLASInt schur_work_size;
  PetscBool    solve_interior;

  /* True if mkl_pardiso functions have been used. */
  PetscBool CleanUp;

  /* Conversion to a format suitable for MKL */
  PetscErrorCode (*Convert)(Mat, PetscBool, MatReuse, PetscBool*, INT_TYPE*, INT_TYPE**, INT_TYPE**, PetscScalar**);
} Mat_MKL_PARDISO;
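
/*
 *  Note on indexing: the MKL manual numbers iparm entries starting from 1 while C arrays
 *  are 0-based; the code below keeps the manual numbering visible by writing, for
 *  example, iparm[12-1] for the entry the manual calls iparm(12).
 */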

PetscErrorCode MatMKLPardiso_Convert_seqsbaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
{
  Mat_SeqSBAIJ   *aa = (Mat_SeqSBAIJ*)A->data;
  PetscInt       bs  = A->rmap->bs,i;

  PetscFunctionBegin;
  PetscCheck(sym,PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"This should not happen");
  *v      = aa->a;
  if (bs == 1) { /* already in the correct format */
    /* though PetscInt and INT_TYPE are the same size, they are defined differently, so the Intel compiler requires a cast */
    *r    = (INT_TYPE*)aa->i;
    *c    = (INT_TYPE*)aa->j;
    *nnz  = (INT_TYPE)aa->nz;
    *free = PETSC_FALSE;
  } else if (reuse == MAT_INITIAL_MATRIX) {
    PetscInt m = A->rmap->n,nz = aa->nz;
    PetscInt *row,*col;
    PetscCall(PetscMalloc2(m+1,&row,nz,&col));
    for (i=0; i<m+1; i++) {
      row[i] = aa->i[i]+1;
    }
    for (i=0; i<nz; i++) {
      col[i] = aa->j[i]+1;
    }
    *r    = (INT_TYPE*)row;
    *c    = (INT_TYPE*)col;
    *nnz  = (INT_TYPE)nz;
    *free = PETSC_TRUE;
  }
  PetscFunctionReturn(0);
}
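
/*
 *  For bs > 1 the matrix is handed to MKL_PARDISO with Fortran-style (1-based) indexing
 *  (iparm[34] is left at 0 by MatFactorMKL_PARDISOInitialize_Private() in that case),
 *  hence the +1 shifts applied to the row and column indices above.
 */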

PetscErrorCode MatMKLPardiso_Convert_seqbaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
{
  Mat_SeqBAIJ    *aa = (Mat_SeqBAIJ*)A->data;
  PetscInt       bs  = A->rmap->bs,i;

  PetscFunctionBegin;
  if (!sym) {
    *v      = aa->a;
    if (bs == 1) { /* already in the correct format */
      /* though PetscInt and INT_TYPE are the same size, they are defined differently, so the Intel compiler requires a cast */
      *r    = (INT_TYPE*)aa->i;
      *c    = (INT_TYPE*)aa->j;
      *nnz  = (INT_TYPE)aa->nz;
      *free = PETSC_FALSE;
      PetscFunctionReturn(0);
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscInt m = A->rmap->n,nz = aa->nz;
      PetscInt *row,*col;
      PetscCall(PetscMalloc2(m+1,&row,nz,&col));
      for (i=0; i<m+1; i++) {
        row[i] = aa->i[i]+1;
      }
      for (i=0; i<nz; i++) {
        col[i] = aa->j[i]+1;
      }
      *r    = (INT_TYPE*)row;
      *c    = (INT_TYPE*)col;
      *nnz  = (INT_TYPE)nz;
    }
    *free = PETSC_TRUE;
  } else {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"This should not happen");
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatMKLPardiso_Convert_seqaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
{
  Mat_SeqAIJ     *aa = (Mat_SeqAIJ*)A->data;
  PetscScalar    *aav;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A,(const PetscScalar**)&aav));
  if (!sym) { /* already in the correct format */
    *v    = aav;
    *r    = (INT_TYPE*)aa->i;
    *c    = (INT_TYPE*)aa->j;
    *nnz  = (INT_TYPE)aa->nz;
    *free = PETSC_FALSE;
  } else if (reuse == MAT_INITIAL_MATRIX) { /* need to get the triangular part */
    PetscScalar *vals,*vv;
    PetscInt    *row,*col,*jj;
    PetscInt    m = A->rmap->n,nz,i;

    nz = 0;
    for (i=0; i<m; i++) nz += aa->i[i+1] - aa->diag[i];
    PetscCall(PetscMalloc2(m+1,&row,nz,&col));
    PetscCall(PetscMalloc1(nz,&vals));
    jj = col;
    vv = vals;

    row[0] = 0;
    for (i=0; i<m; i++) {
      PetscInt    *aj = aa->j + aa->diag[i];
      PetscScalar *av = aav + aa->diag[i];
      PetscInt    rl  = aa->i[i+1] - aa->diag[i],j;

      for (j=0; j<rl; j++) {
        *jj = *aj; jj++; aj++;
        *vv = *av; vv++; av++;
      }
      row[i+1] = row[i] + rl;
    }
    *v    = vals;
    *r    = (INT_TYPE*)row;
    *c    = (INT_TYPE*)col;
    *nnz  = (INT_TYPE)nz;
    *free = PETSC_TRUE;
  } else {
    PetscScalar *vv;
    PetscInt    m = A->rmap->n,i;

    vv = *v;
    for (i=0; i<m; i++) {
      PetscScalar *av = aav + aa->diag[i];
      PetscInt    rl  = aa->i[i+1] - aa->diag[i],j;
      for (j=0; j<rl; j++) {
        *vv = *av; vv++; av++;
      }
    }
    *free = PETSC_TRUE;
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A,(const PetscScalar**)&aav));
  PetscFunctionReturn(0);
}
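
/*
 *  In the symmetric (needsym) case only the upper triangular part of each row, which in
 *  SeqAIJ storage starts at index aa->diag[i], is copied out. A small worked sketch in
 *  0-based CSR (the indexing used here, since iparm[34] == 1 for SeqAIJ):
 *
 *      A = [2 1; 1 3]  ->  i = {0,2,4}, j = {0,1,0,1}, a = {2,1,1,3}, diag = {0,3}
 *
 *  produces row = {0,2,3}, col = {0,1,1}, vals = {2,1,3}.
 */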

static PetscErrorCode MatMKLPardisoSolveSchur_Private(Mat F, PetscScalar *B, PetscScalar *X)
{
  Mat_MKL_PARDISO      *mpardiso = (Mat_MKL_PARDISO*)F->data;
  Mat                  S,Xmat,Bmat;
  MatFactorSchurStatus schurstatus;

  PetscFunctionBegin;
  PetscCall(MatFactorGetSchurComplement(F,&S,&schurstatus));
  PetscCheckFalse(X == B && schurstatus == MAT_FACTOR_SCHUR_INVERTED,PETSC_COMM_SELF,PETSC_ERR_SUP,"X and B cannot point to the same address");
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->nrhs,B,&Bmat));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->nrhs,X,&Xmat));
  PetscCall(MatSetType(Bmat,((PetscObject)S)->type_name));
  PetscCall(MatSetType(Xmat,((PetscObject)S)->type_name));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
  PetscCall(MatBindToCPU(Xmat,S->boundtocpu));
  PetscCall(MatBindToCPU(Bmat,S->boundtocpu));
#endif

#if defined(PETSC_USE_COMPLEX)
  PetscCheckFalse(mpardiso->iparm[12-1] == 1,PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Hermitian solve not implemented yet");
#endif

  switch (schurstatus) {
  case MAT_FACTOR_SCHUR_FACTORED:
    if (!mpardiso->iparm[12-1]) {
      PetscCall(MatMatSolve(S,Bmat,Xmat));
    } else { /* transpose solve */
      PetscCall(MatMatSolveTranspose(S,Bmat,Xmat));
    }
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    PetscCall(MatProductCreateWithMat(S,Bmat,NULL,Xmat));
    if (!mpardiso->iparm[12-1]) {
      PetscCall(MatProductSetType(Xmat,MATPRODUCT_AB));
    } else { /* transpose solve */
      PetscCall(MatProductSetType(Xmat,MATPRODUCT_AtB));
    }
    PetscCall(MatProductSetFromOptions(Xmat));
    PetscCall(MatProductSymbolic(Xmat));
    PetscCall(MatProductNumeric(Xmat));
    PetscCall(MatProductClear(Xmat));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %" PetscInt_FMT,F->schur_status);
    break;
  }
  PetscCall(MatFactorRestoreSchurComplement(F,&S,schurstatus));
  PetscCall(MatDestroy(&Bmat));
  PetscCall(MatDestroy(&Xmat));
  PetscFunctionReturn(0);
}

PetscErrorCode MatFactorSetSchurIS_MKL_PARDISO(Mat F, IS is)
{
  Mat_MKL_PARDISO   *mpardiso = (Mat_MKL_PARDISO*)F->data;
  const PetscScalar *arr;
  const PetscInt    *idxs;
  PetscInt          size,i;
  PetscMPIInt       csize;
  PetscBool         sorted;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)F),&csize));
  PetscCheckFalse(csize > 1,PETSC_COMM_SELF,PETSC_ERR_SUP,"MKL_PARDISO parallel Schur complements not yet supported from PETSc");
  PetscCall(ISSorted(is,&sorted));
  if (!sorted) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS for MKL_PARDISO Schur complements needs to be sorted");
  }
  PetscCall(ISGetLocalSize(is,&size));
  PetscCall(PetscFree(mpardiso->schur_work));
  PetscCall(PetscBLASIntCast(PetscMax(mpardiso->n,2*size),&mpardiso->schur_work_size));
  PetscCall(PetscMalloc1(mpardiso->schur_work_size,&mpardiso->schur_work));
  PetscCall(MatDestroy(&F->schur));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,size,size,NULL,&F->schur));
  PetscCall(MatDenseGetArrayRead(F->schur,&arr));
  mpardiso->schur      = (PetscScalar*)arr;
  mpardiso->schur_size = size;
  PetscCall(MatDenseRestoreArrayRead(F->schur,&arr));
  if (mpardiso->mtype == 2) {
    PetscCall(MatSetOption(F->schur,MAT_SPD,PETSC_TRUE));
  }

  PetscCall(PetscFree(mpardiso->schur_idxs));
  PetscCall(PetscMalloc1(size,&mpardiso->schur_idxs));
  PetscCall(PetscArrayzero(mpardiso->perm,mpardiso->n));
  PetscCall(ISGetIndices(is,&idxs));
  PetscCall(PetscArraycpy(mpardiso->schur_idxs,idxs,size));
  for (i=0;i<size;i++) mpardiso->perm[idxs[i]] = 1;
  PetscCall(ISRestoreIndices(is,&idxs));
  if (size) { /* turn on Schur switch if the set of indices is not empty */
    mpardiso->iparm[36-1] = 2;
  }
  PetscFunctionReturn(0);
}
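
/*
 *  A minimal usage sketch (error checking and cleanup omitted; `is` is assumed to be a
 *  sorted IS selecting the Schur rows/columns, and it must be supplied before the
 *  symbolic factorization):
 *
 *      Mat F,S;
 *      PetscCall(MatGetFactor(A,MATSOLVERMKL_PARDISO,MAT_FACTOR_LU,&F));
 *      PetscCall(MatFactorSetSchurIS(F,is));
 *      // ... run MatLUFactorSymbolic()/MatLUFactorNumeric() on F as usual ...
 *      PetscCall(MatFactorGetSchurComplement(F,&S,NULL)); // S is a sequential dense Mat
 */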

PetscErrorCode MatDestroy_MKL_PARDISO(Mat A)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;

  PetscFunctionBegin;
  if (mat_mkl_pardiso->CleanUp) {
    mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;

    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      NULL,
      NULL,
      NULL,
      NULL,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      NULL,
      NULL,
      &mat_mkl_pardiso->err);
  }
  PetscCall(PetscFree(mat_mkl_pardiso->perm));
  PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
  PetscCall(PetscFree(mat_mkl_pardiso->schur_idxs));
  if (mat_mkl_pardiso->freeaij) {
    PetscCall(PetscFree2(mat_mkl_pardiso->ia,mat_mkl_pardiso->ja));
    if (mat_mkl_pardiso->iparm[34] == 1) {
      PetscCall(PetscFree(mat_mkl_pardiso->a));
    }
  }
  PetscCall(PetscFree(A->data));

  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatMkl_PardisoSetCntl_C",NULL));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMKLPardisoScatterSchur_Private(Mat_MKL_PARDISO *mpardiso, PetscScalar *whole, PetscScalar *schur, PetscBool reduce)
{
  PetscFunctionBegin;
  if (reduce) { /* data given for the whole matrix */
    PetscInt i,m=0,p=0;
    for (i=0;i<mpardiso->nrhs;i++) {
      PetscInt j;
      for (j=0;j<mpardiso->schur_size;j++) {
        schur[p+j] = whole[m+mpardiso->schur_idxs[j]];
      }
      m += mpardiso->n;
      p += mpardiso->schur_size;
    }
  } else { /* from Schur to whole */
    PetscInt i,m=0,p=0;
    for (i=0;i<mpardiso->nrhs;i++) {
      PetscInt j;
      for (j=0;j<mpardiso->schur_size;j++) {
        whole[m+mpardiso->schur_idxs[j]] = schur[p+j];
      }
      m += mpardiso->n;
      p += mpardiso->schur_size;
    }
  }
  PetscFunctionReturn(0);
}
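
/*
 *  Layout assumed by the routine above: `whole` holds nrhs vectors of length n stored
 *  back to back, while `schur` holds the corresponding nrhs restrictions of length
 *  schur_size; reduce == PETSC_TRUE gathers whole -> schur, otherwise entries are
 *  scattered back schur -> whole.
 */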

PetscErrorCode MatSolve_MKL_PARDISO(Mat A,Vec b,Vec x)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_pardiso->nrhs = 1;
  PetscCall(VecGetArrayWrite(x,&xarray));
  PetscCall(VecGetArrayRead(b,&barray));

  if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
  else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

  if (barray == xarray) { /* if the two vectors share the same memory */
    PetscScalar *work;
    if (!mat_mkl_pardiso->schur_work) {
      PetscCall(PetscMalloc1(mat_mkl_pardiso->n,&work));
    } else {
      work = mat_mkl_pardiso->schur_work;
    }
    mat_mkl_pardiso->iparm[6-1] = 1;
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      NULL,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)xarray,
      (void*)work,
      &mat_mkl_pardiso->err);
    if (!mat_mkl_pardiso->schur_work) {
      PetscCall(PetscFree(work));
    }
  } else {
    mat_mkl_pardiso->iparm[6-1] = 0;
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      mat_mkl_pardiso->perm,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)barray,
      (void*)xarray,
      &mat_mkl_pardiso->err);
  }
  PetscCall(VecRestoreArrayRead(b,&barray));

  PetscCheckFalse(mat_mkl_pardiso->err < 0,PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

  if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
    if (!mat_mkl_pardiso->solve_interior) {
      PetscInt shift = mat_mkl_pardiso->schur_size;

      PetscCall(MatFactorFactorizeSchurComplement(A));
      /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
      if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;

      /* solve Schur complement */
      PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work,PETSC_TRUE));
      PetscCall(MatMKLPardisoSolveSchur_Private(A,mat_mkl_pardiso->schur_work,mat_mkl_pardiso->schur_work+shift));
      PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work+shift,PETSC_FALSE));
    } else { /* if we are solving for the interior problem, any value in barray[schur] forward-substituted to xarray[schur] will be neglected */
      PetscInt i;
      for (i=0;i<mat_mkl_pardiso->schur_size;i++) {
        xarray[mat_mkl_pardiso->schur_idxs[i]] = 0.;
      }
    }

    /* expansion phase */
    mat_mkl_pardiso->iparm[6-1] = 1;
    mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      mat_mkl_pardiso->perm,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)xarray,
      (void*)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
      &mat_mkl_pardiso->err);

    PetscCheckFalse(mat_mkl_pardiso->err < 0,PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
    mat_mkl_pardiso->iparm[6-1] = 0;
  }
  PetscCall(VecRestoreArrayWrite(x,&xarray));
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(0);
}

PetscErrorCode MatSolveTranspose_MKL_PARDISO(Mat A,Vec b,Vec x)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
  PetscInt        oiparm12;

  PetscFunctionBegin;
  oiparm12 = mat_mkl_pardiso->iparm[12 - 1];
  mat_mkl_pardiso->iparm[12 - 1] = 2;
  PetscCall(MatSolve_MKL_PARDISO(A,b,x));
  mat_mkl_pardiso->iparm[12 - 1] = oiparm12;
  PetscFunctionReturn(0);
}
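
/*
 *  Per the MKL manual, iparm(12) selects the system actually solved: 0 -> A x = b,
 *  1 -> A^H x = b (conjugate transpose), 2 -> A^T x = b. The routine above temporarily
 *  sets it to 2 and restores the previous value so later solves are unaffected.
 */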

PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A,Mat B,Mat X)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)(A)->data;
  const PetscScalar *barray;
  PetscScalar       *xarray;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)B,MATSEQDENSE,&flg));
  PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATSEQDENSE matrix");
  if (X != B) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)X,MATSEQDENSE,&flg));
    PetscCheck(flg,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATSEQDENSE matrix");
  }

  PetscCall(MatGetSize(B,NULL,(PetscInt*)&mat_mkl_pardiso->nrhs));

  if (mat_mkl_pardiso->nrhs > 0) {
    PetscCall(MatDenseGetArrayRead(B,&barray));
    PetscCall(MatDenseGetArrayWrite(X,&xarray));

    PetscCheckFalse(barray == xarray,PETSC_COMM_SELF,PETSC_ERR_SUP,"B and X cannot share the same memory location");
    if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
    else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

    MKL_PARDISO (mat_mkl_pardiso->pt,
      &mat_mkl_pardiso->maxfct,
      &mat_mkl_pardiso->mnum,
      &mat_mkl_pardiso->mtype,
      &mat_mkl_pardiso->phase,
      &mat_mkl_pardiso->n,
      mat_mkl_pardiso->a,
      mat_mkl_pardiso->ia,
      mat_mkl_pardiso->ja,
      mat_mkl_pardiso->perm,
      &mat_mkl_pardiso->nrhs,
      mat_mkl_pardiso->iparm,
      &mat_mkl_pardiso->msglvl,
      (void*)barray,
      (void*)xarray,
      &mat_mkl_pardiso->err);
    PetscCheckFalse(mat_mkl_pardiso->err < 0,PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

    PetscCall(MatDenseRestoreArrayRead(B,&barray));
    if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
      PetscScalar *o_schur_work = NULL;

      /* solve Schur complement */
      if (!mat_mkl_pardiso->solve_interior) {
        PetscInt shift = mat_mkl_pardiso->schur_size*mat_mkl_pardiso->nrhs,scale;
        PetscInt mem = mat_mkl_pardiso->n*mat_mkl_pardiso->nrhs;

        PetscCall(MatFactorFactorizeSchurComplement(A));
        /* allocate extra memory if it is needed */
        scale = 1;
        if (A->schur_status == MAT_FACTOR_SCHUR_INVERTED) scale = 2;
        mem *= scale;
        if (mem > mat_mkl_pardiso->schur_work_size) {
          o_schur_work = mat_mkl_pardiso->schur_work;
          PetscCall(PetscMalloc1(mem,&mat_mkl_pardiso->schur_work));
        }
        /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
        if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;
        PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work,PETSC_TRUE));
        PetscCall(MatMKLPardisoSolveSchur_Private(A,mat_mkl_pardiso->schur_work,mat_mkl_pardiso->schur_work+shift));
        PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work+shift,PETSC_FALSE));
      } else { /* if we are solving for the interior problem, any value in barray[schur,n] forward-substituted to xarray[schur,n] will be neglected */
        PetscInt i,n,m=0;
        for (n=0;n<mat_mkl_pardiso->nrhs;n++) {
          for (i=0;i<mat_mkl_pardiso->schur_size;i++) {
            xarray[mat_mkl_pardiso->schur_idxs[i]+m] = 0.;
          }
          m += mat_mkl_pardiso->n;
        }
      }

      /* expansion phase */
      mat_mkl_pardiso->iparm[6-1] = 1;
      mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
      MKL_PARDISO (mat_mkl_pardiso->pt,
        &mat_mkl_pardiso->maxfct,
        &mat_mkl_pardiso->mnum,
        &mat_mkl_pardiso->mtype,
        &mat_mkl_pardiso->phase,
        &mat_mkl_pardiso->n,
        mat_mkl_pardiso->a,
        mat_mkl_pardiso->ia,
        mat_mkl_pardiso->ja,
        mat_mkl_pardiso->perm,
        &mat_mkl_pardiso->nrhs,
        mat_mkl_pardiso->iparm,
        &mat_mkl_pardiso->msglvl,
        (void*)xarray,
        (void*)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
        &mat_mkl_pardiso->err);
      if (o_schur_work) { /* restore original schur_work (minimal size) */
        PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
        mat_mkl_pardiso->schur_work = o_schur_work;
      }
      PetscCheckFalse(mat_mkl_pardiso->err < 0,PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
      mat_mkl_pardiso->iparm[6-1] = 0;
    }
    PetscCall(MatDenseRestoreArrayWrite(X,&xarray));
  }
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(0);
}

PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)(F)->data;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  PetscCall((*mat_mkl_pardiso->Convert)(A,mat_mkl_pardiso->needsym,MAT_REUSE_MATRIX,&mat_mkl_pardiso->freeaij,&mat_mkl_pardiso->nz,&mat_mkl_pardiso->ia,&mat_mkl_pardiso->ja,(PetscScalar**)&mat_mkl_pardiso->a));

  mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION;
  MKL_PARDISO (mat_mkl_pardiso->pt,
    &mat_mkl_pardiso->maxfct,
    &mat_mkl_pardiso->mnum,
    &mat_mkl_pardiso->mtype,
    &mat_mkl_pardiso->phase,
    &mat_mkl_pardiso->n,
    mat_mkl_pardiso->a,
    mat_mkl_pardiso->ia,
    mat_mkl_pardiso->ja,
    mat_mkl_pardiso->perm,
    &mat_mkl_pardiso->nrhs,
    mat_mkl_pardiso->iparm,
    &mat_mkl_pardiso->msglvl,
    NULL,
    (void*)mat_mkl_pardiso->schur,
    &mat_mkl_pardiso->err);
  PetscCheckFalse(mat_mkl_pardiso->err < 0,PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

  /* report flops */
  if (mat_mkl_pardiso->iparm[18] > 0) {
    PetscCall(PetscLogFlops(PetscPowRealInt(10.,6)*mat_mkl_pardiso->iparm[18]));
  }

  if (F->schur) { /* schur output from pardiso is in row major format */
#if defined(PETSC_HAVE_CUDA)
    F->schur->offloadmask = PETSC_OFFLOAD_CPU;
#endif
    PetscCall(MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED));
    PetscCall(MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur));
  }
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  mat_mkl_pardiso->CleanUp  = PETSC_TRUE;
  PetscFunctionReturn(0);
}

PetscErrorCode PetscSetMKL_PARDISOFromOptions(Mat F, Mat A)
{
  Mat_MKL_PARDISO     *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;
  PetscErrorCode      ierr;
  PetscInt            icntl,bs,threads=1;
  PetscBool           flg;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MKL_PARDISO Options","Mat");PetscCall(ierr);

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_65","Suggested number of threads to use within PARDISO","None",threads,&threads,&flg));
  if (flg) PetscSetMKL_PARDISOThreads((int)threads);

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_66","Maximum number of factors with identical sparsity structure that must be kept in memory at the same time","None",mat_mkl_pardiso->maxfct,&icntl,&flg));
  if (flg) mat_mkl_pardiso->maxfct = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_67","Indicates the actual matrix for the solution phase","None",mat_mkl_pardiso->mnum,&icntl,&flg));
  if (flg) mat_mkl_pardiso->mnum = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_68","Message level information","None",mat_mkl_pardiso->msglvl,&icntl,&flg));
  if (flg) mat_mkl_pardiso->msglvl = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_69","Defines the matrix type","None",mat_mkl_pardiso->mtype,&icntl,&flg));
  if (flg) {
    void *pt[IPARM_SIZE];
    mat_mkl_pardiso->mtype = icntl;
    icntl = mat_mkl_pardiso->iparm[34];
    bs = mat_mkl_pardiso->iparm[36];
    MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
    mat_mkl_pardiso->iparm[27] = 1;
#else
    mat_mkl_pardiso->iparm[27] = 0;
#endif
    mat_mkl_pardiso->iparm[34] = icntl;
    mat_mkl_pardiso->iparm[36] = bs;
  }

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_1","Use default values (if 0)","None",mat_mkl_pardiso->iparm[0],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[0] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_2","Fill-in reducing ordering for the input matrix","None",mat_mkl_pardiso->iparm[1],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[1] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_4","Preconditioned CGS/CG","None",mat_mkl_pardiso->iparm[3],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[3] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_5","User permutation","None",mat_mkl_pardiso->iparm[4],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[4] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_6","Write solution on x","None",mat_mkl_pardiso->iparm[5],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[5] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_8","Iterative refinement step","None",mat_mkl_pardiso->iparm[7],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[7] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_10","Pivoting perturbation","None",mat_mkl_pardiso->iparm[9],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[9] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_11","Scaling vectors","None",mat_mkl_pardiso->iparm[10],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[10] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_12","Solve with transposed or conjugate transposed matrix A","None",mat_mkl_pardiso->iparm[11],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[11] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_13","Improved accuracy using (non-) symmetric weighted matching","None",mat_mkl_pardiso->iparm[12],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[12] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_18","Number of non-zero elements","None",mat_mkl_pardiso->iparm[17],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[17] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_19","Report number of floating point operations (0 to disable)","None",mat_mkl_pardiso->iparm[18],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[18] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_21","Pivoting for symmetric indefinite matrices","None",mat_mkl_pardiso->iparm[20],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[20] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_24","Parallel factorization control","None",mat_mkl_pardiso->iparm[23],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[23] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_25","Parallel forward/backward solve control","None",mat_mkl_pardiso->iparm[24],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[24] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_27","Matrix checker","None",mat_mkl_pardiso->iparm[26],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[26] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_31","Partial solve and computing selected components of the solution vectors","None",mat_mkl_pardiso->iparm[30],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[30] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_34","Optimal number of threads for conditional numerical reproducibility (CNR) mode","None",mat_mkl_pardiso->iparm[33],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[33] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_60","Intel MKL_PARDISO mode","None",mat_mkl_pardiso->iparm[59],&icntl,&flg));
  if (flg) mat_mkl_pardiso->iparm[59] = icntl;
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
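
/*
 *  Typical command-line usage (a sketch; `./app` stands for any PETSc application):
 *  the numbered options mirror the corresponding iparm entries (only the subset
 *  registered above is available), while 65-69 are the PETSc-side extensions, e.g.
 *
 *      ./app -pc_type lu -pc_factor_mat_solver_type mkl_pardiso \
 *            -mat_mkl_pardiso_65 4 -mat_mkl_pardiso_68 1
 *
 *  suggests 4 threads and turns on solver statistics.
 */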

PetscErrorCode MatFactorMKL_PARDISOInitialize_Private(Mat A, MatFactorType ftype, Mat_MKL_PARDISO *mat_mkl_pardiso)
{
  PetscInt       i,bs;
  PetscBool      match;

  PetscFunctionBegin;
  for (i=0; i<IPARM_SIZE; i++) mat_mkl_pardiso->iparm[i] = 0;
  for (i=0; i<IPARM_SIZE; i++) mat_mkl_pardiso->pt[i] = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  mat_mkl_pardiso->iparm[27] = 1;
#else
  mat_mkl_pardiso->iparm[27] = 0;
#endif
  /* Default options for both sym and unsym */
  mat_mkl_pardiso->iparm[ 0] =  1; /* Solver default parameters overridden with those provided in iparm */
  mat_mkl_pardiso->iparm[ 1] =  2; /* Metis reordering */
  mat_mkl_pardiso->iparm[ 5] =  0; /* Write solution into x */
  mat_mkl_pardiso->iparm[ 7] =  0; /* Max number of iterative refinement steps */
  mat_mkl_pardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
  mat_mkl_pardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
#if 0
  mat_mkl_pardiso->iparm[23] =  1; /* Parallel factorization control */
#endif
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A,&match,MATSEQBAIJ,MATSEQSBAIJ,""));
  PetscCall(MatGetBlockSize(A,&bs));
  if (!match || bs == 1) {
    mat_mkl_pardiso->iparm[34] = 1; /* Cluster Sparse Solver uses C-style indexing for ia and ja arrays */
    mat_mkl_pardiso->n         = A->rmap->N;
  } else {
    mat_mkl_pardiso->iparm[34] = 0; /* Cluster Sparse Solver uses Fortran-style indexing for ia and ja arrays */
    mat_mkl_pardiso->iparm[36] = bs;
    mat_mkl_pardiso->n         = A->rmap->N/bs;
  }
  mat_mkl_pardiso->iparm[39] =  0; /* Input: matrix/rhs/solution stored on rank-0 */

  mat_mkl_pardiso->CleanUp   = PETSC_FALSE;
  mat_mkl_pardiso->maxfct    = 1; /* Maximum number of numerical factorizations. */
  mat_mkl_pardiso->mnum      = 1; /* Which factorization to use. */
  mat_mkl_pardiso->msglvl    = 0; /* 0: do not print 1: Print statistical information in file */
  mat_mkl_pardiso->phase     = -1;
  mat_mkl_pardiso->err       = 0;
  mat_mkl_pardiso->nrhs      = 1;

  if (ftype == MAT_FACTOR_LU) {
    mat_mkl_pardiso->iparm[ 9] = 13; /* Perturb the pivot elements with 1E-13 */
    mat_mkl_pardiso->iparm[10] =  1; /* Use nonsymmetric permutation and scaling MPS */
    mat_mkl_pardiso->iparm[12] =  1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
  } else {
    mat_mkl_pardiso->iparm[ 9] = 8; /* Perturb the pivot elements with 1E-8 */
    mat_mkl_pardiso->iparm[10] = 0; /* Do not use nonsymmetric permutation and scaling MPS */
    mat_mkl_pardiso->iparm[12] = 1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
#if defined(PETSC_USE_DEBUG)
    mat_mkl_pardiso->iparm[26] = 1; /* Matrix checker */
#endif
  }
  PetscCall(PetscCalloc1(A->rmap->N*sizeof(INT_TYPE), &mat_mkl_pardiso->perm));
  mat_mkl_pardiso->schur_size = 0;
  PetscFunctionReturn(0);
}

PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = DIFFERENT_NONZERO_PATTERN;
  PetscCall(PetscSetMKL_PARDISOFromOptions(F,A));
  /* throw away any previously computed structure */
  if (mat_mkl_pardiso->freeaij) {
    PetscCall(PetscFree2(mat_mkl_pardiso->ia,mat_mkl_pardiso->ja));
    if (mat_mkl_pardiso->iparm[34] == 1) {
      PetscCall(PetscFree(mat_mkl_pardiso->a));
    }
  }
  PetscCall((*mat_mkl_pardiso->Convert)(A,mat_mkl_pardiso->needsym,MAT_INITIAL_MATRIX,&mat_mkl_pardiso->freeaij,&mat_mkl_pardiso->nz,&mat_mkl_pardiso->ia,&mat_mkl_pardiso->ja,(PetscScalar**)&mat_mkl_pardiso->a));
  if (mat_mkl_pardiso->iparm[34] == 1) mat_mkl_pardiso->n = A->rmap->N;
  else mat_mkl_pardiso->n = A->rmap->N/A->rmap->bs;

  mat_mkl_pardiso->phase = JOB_ANALYSIS;

  /* reset flops counting if requested */
  if (mat_mkl_pardiso->iparm[18]) mat_mkl_pardiso->iparm[18] = -1;

  MKL_PARDISO (mat_mkl_pardiso->pt,
    &mat_mkl_pardiso->maxfct,
    &mat_mkl_pardiso->mnum,
    &mat_mkl_pardiso->mtype,
    &mat_mkl_pardiso->phase,
    &mat_mkl_pardiso->n,
    mat_mkl_pardiso->a,
    mat_mkl_pardiso->ia,
    mat_mkl_pardiso->ja,
    mat_mkl_pardiso->perm,
    &mat_mkl_pardiso->nrhs,
    mat_mkl_pardiso->iparm,
    &mat_mkl_pardiso->msglvl,
    NULL,
    NULL,
    &mat_mkl_pardiso->err);
  PetscCheckFalse(mat_mkl_pardiso->err < 0,PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

  mat_mkl_pardiso->CleanUp = PETSC_TRUE;

  if (F->factortype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_MKL_PARDISO;
  else F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_PARDISO;

  F->ops->solve           = MatSolve_MKL_PARDISO;
  F->ops->solvetranspose  = MatSolveTranspose_MKL_PARDISO;
  F->ops->matsolve        = MatMatSolve_MKL_PARDISO;
  PetscFunctionReturn(0);
}

PetscErrorCode MatLUFactorSymbolic_AIJMKL_PARDISO(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
  PetscFunctionReturn(0);
}

#if !defined(PETSC_USE_COMPLEX)
PetscErrorCode MatGetInertia_MKL_PARDISO(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
{
  Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)F->data;

  PetscFunctionBegin;
  if (nneg) *nneg = mat_mkl_pardiso->iparm[22];
  if (npos) *npos = mat_mkl_pardiso->iparm[21];
  if (nzero) *nzero = F->rmap->N - (mat_mkl_pardiso->iparm[22] + mat_mkl_pardiso->iparm[21]);
  PetscFunctionReturn(0);
}
#endif

PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_PARDISO(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL;
#else
  F->ops->getinertia = MatGetInertia_MKL_PARDISO;
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatView_MKL_PARDISO(Mat A, PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;
  Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
  PetscInt          i;

  PetscFunctionBegin;
  if (A->ops->solve != MatSolve_MKL_PARDISO) PetscFunctionReturn(0);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO phase:             %d \n",mat_mkl_pardiso->phase));
      for (i=1; i<=64; i++) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO iparm[%d]:     %d \n",i, mat_mkl_pardiso->iparm[i - 1]));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO maxfct:     %d \n", mat_mkl_pardiso->maxfct));
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO mnum:     %d \n", mat_mkl_pardiso->mnum));
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO mtype:     %d \n", mat_mkl_pardiso->mtype));
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO n:     %d \n", mat_mkl_pardiso->n));
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO nrhs:     %d \n", mat_mkl_pardiso->nrhs));
      PetscCall(PetscViewerASCIIPrintf(viewer,"MKL_PARDISO msglvl:     %d \n", mat_mkl_pardiso->msglvl));
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatGetInfo_MKL_PARDISO(Mat A, MatInfoType flag, MatInfo *info)
{
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_used           = mat_mkl_pardiso->iparm[17];
  info->nz_allocated      = mat_mkl_pardiso->iparm[17];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}

PetscErrorCode MatMkl_PardisoSetCntl_MKL_PARDISO(Mat F,PetscInt icntl,PetscInt ival)
{
  PetscInt        backup,bs;
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;

  PetscFunctionBegin;
  if (icntl <= 64) {
    mat_mkl_pardiso->iparm[icntl - 1] = ival;
  } else {
    if (icntl == 65) PetscSetMKL_PARDISOThreads(ival);
    else if (icntl == 66) mat_mkl_pardiso->maxfct = ival;
    else if (icntl == 67) mat_mkl_pardiso->mnum = ival;
    else if (icntl == 68) mat_mkl_pardiso->msglvl = ival;
    else if (icntl == 69) {
      void *pt[IPARM_SIZE];
      backup = mat_mkl_pardiso->iparm[34];
      bs = mat_mkl_pardiso->iparm[36];
      mat_mkl_pardiso->mtype = ival;
      MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
      mat_mkl_pardiso->iparm[27] = 1;
#else
      mat_mkl_pardiso->iparm[27] = 0;
#endif
      mat_mkl_pardiso->iparm[34] = backup;
      mat_mkl_pardiso->iparm[36] = bs;
    } else if (icntl==70) mat_mkl_pardiso->solve_interior = (PetscBool)!!ival;
  }
  PetscFunctionReturn(0);
}

/*@
  MatMkl_PardisoSetCntl - Set Mkl_Pardiso parameters

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor()
.  icntl - index of the Mkl_Pardiso parameter
-  ival - value of the Mkl_Pardiso parameter

  Options Database:
.   -mat_mkl_pardiso_<icntl> <ival> - change the option numbered icntl to the value ival

   Level: beginner

   References:
.  * - Mkl_Pardiso Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMkl_PardisoSetCntl(Mat F,PetscInt icntl,PetscInt ival)
{
  PetscFunctionBegin;
  PetscTryMethod(F,"MatMkl_PardisoSetCntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));
  PetscFunctionReturn(0);
}
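
/*
 *  A minimal programmatic sketch (error checking omitted):
 *
 *      Mat F;
 *      PetscCall(MatGetFactor(A,MATSOLVERMKL_PARDISO,MAT_FACTOR_LU,&F));
 *      PetscCall(MatMkl_PardisoSetCntl(F,65,4)); // suggest 4 threads
 *      PetscCall(MatMkl_PardisoSetCntl(F,8,2));  // two iterative refinement steps
 */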

/*MC
  MATSOLVERMKL_PARDISO -  A matrix type providing direct solvers (LU and Cholesky) for
  sequential matrices via the external package MKL_PARDISO.

  Works with MATSEQAIJ, MATSEQBAIJ, and MATSEQSBAIJ matrices

  Use -pc_type lu -pc_factor_mat_solver_type mkl_pardiso to use this direct solver

  Options Database Keys:
+ -mat_mkl_pardiso_65 - Suggested number of threads to use within MKL_PARDISO
. -mat_mkl_pardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
. -mat_mkl_pardiso_67 - Indicates the actual matrix for the solution phase
. -mat_mkl_pardiso_68 - Message level information, use 1 to get detailed information on the solver options
. -mat_mkl_pardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, iparm parameters are going to be set to the default ones for the matrix type
. -mat_mkl_pardiso_1  - Use default values
. -mat_mkl_pardiso_2  - Fill-in reducing ordering for the input matrix
. -mat_mkl_pardiso_4  - Preconditioned CGS/CG
. -mat_mkl_pardiso_5  - User permutation
. -mat_mkl_pardiso_6  - Write solution on x
. -mat_mkl_pardiso_8  - Iterative refinement step
. -mat_mkl_pardiso_10 - Pivoting perturbation
. -mat_mkl_pardiso_11 - Scaling vectors
. -mat_mkl_pardiso_12 - Solve with transposed or conjugate transposed matrix A
. -mat_mkl_pardiso_13 - Improved accuracy using (non-) symmetric weighted matching
. -mat_mkl_pardiso_18 - Number of non-zero elements
. -mat_mkl_pardiso_19 - Report number of floating point operations
. -mat_mkl_pardiso_21 - Pivoting for symmetric indefinite matrices
. -mat_mkl_pardiso_24 - Parallel factorization control
. -mat_mkl_pardiso_25 - Parallel forward/backward solve control
. -mat_mkl_pardiso_27 - Matrix checker
. -mat_mkl_pardiso_31 - Partial solve and computing selected components of the solution vectors
. -mat_mkl_pardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
- -mat_mkl_pardiso_60 - Intel MKL_PARDISO mode

  Level: beginner

  Notes:
    Use -mat_mkl_pardiso_68 1 to display the number of threads the solver is using. MKL does not provide a way to directly access this
    information.

    For more information on the options, check the MKL_PARDISO manual

.seealso: PCFactorSetMatSolverType(), MatSolverType

M*/
static PetscErrorCode MatFactorGetSolverType_mkl_pardiso(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMKL_PARDISO;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A,MatFactorType ftype,Mat *F)
{
  Mat             B;
  Mat_MKL_PARDISO *mat_mkl_pardiso;
  PetscBool       isSeqAIJ,isSeqBAIJ,isSeqSBAIJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A),&B));
  PetscCall(MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N));
  PetscCall(PetscStrallocpy("mkl_pardiso",&((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  PetscCall(PetscNewLog(B,&mat_mkl_pardiso));
  B->data = mat_mkl_pardiso;

  PetscCall(MatFactorMKL_PARDISOInitialize_Private(A, ftype, mat_mkl_pardiso));
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_PARDISO;
    B->factortype            = MAT_FACTOR_LU;
    mat_mkl_pardiso->needsym = PETSC_FALSE;
    if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
    else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
    else {
      PetscCheck(!isSeqSBAIJ,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO LU factor with SEQSBAIJ format! Use MAT_FACTOR_CHOLESKY instead");
      SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO LU with %s format",((PetscObject)A)->type_name);
    }
#if defined(PETSC_USE_COMPLEX)
    mat_mkl_pardiso->mtype = 13;
#else
    mat_mkl_pardiso->mtype = 11;
#endif
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_PARDISO;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
    else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
    else if (isSeqSBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqsbaij;
    else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO CHOLESKY with %s format",((PetscObject)A)->type_name);

    mat_mkl_pardiso->needsym = PETSC_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    if (A->spd_set && A->spd) mat_mkl_pardiso->mtype = 2;
    else                      mat_mkl_pardiso->mtype = -2;
#else
    mat_mkl_pardiso->mtype = 6;
    PetscCheck(!A->hermitian,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO CHOLESKY with Hermitian matrices! Use MAT_FACTOR_LU instead");
#endif
  }
  B->ops->destroy = MatDestroy_MKL_PARDISO;
  B->ops->view    = MatView_MKL_PARDISO;
  B->ops->getinfo = MatGetInfo_MKL_PARDISO;
  B->factortype   = ftype;
  B->assembled    = PETSC_TRUE;

  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERMKL_PARDISO,&B->solvertype));

  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mkl_pardiso));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MKL_PARDISO));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMkl_PardisoSetCntl_C",MatMkl_PardisoSetCntl_MKL_PARDISO));

  *F = B;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mkl_pardiso));
  PetscFunctionReturn(0);
}