#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/dense/seq/dense.h>

#if defined(PETSC_HAVE_MKL_INTEL_ILP64)
  #define MKL_ILP64
#endif
#include <mkl_pardiso.h>

PETSC_EXTERN void PetscSetMKL_PARDISOThreads(int);

/*
 * Possible mkl_pardiso phases that control the execution of the solver.
 * For more information check the mkl_pardiso manual.
 */
#define JOB_ANALYSIS                                                    11
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION                            12
#define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
#define JOB_NUMERICAL_FACTORIZATION                                     22
#define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT          23
#define JOB_SOLVE_ITERATIVE_REFINEMENT                                  33
#define JOB_SOLVE_FORWARD_SUBSTITUTION                                  331
#define JOB_SOLVE_DIAGONAL_SUBSTITUTION                                 332
#define JOB_SOLVE_BACKWARD_SUBSTITUTION                                 333
#define JOB_RELEASE_OF_LU_MEMORY                                        0
#define JOB_RELEASE_OF_ALL_MEMORY                                       -1

#define IPARM_SIZE 64

#if defined(PETSC_USE_64BIT_INDICES)
  #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
    #define INT_TYPE         long long int
    #define MKL_PARDISO      pardiso
    #define MKL_PARDISO_INIT pardisoinit
  #else
/* this is the case where the MKL BLAS/LAPACK are 32-bit integers but the 64-bit integer version
   of the Pardiso code is used; hence the need for the 64 below */
    #define INT_TYPE         long long int
    #define MKL_PARDISO      pardiso_64
    #define MKL_PARDISO_INIT pardiso_64init
void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm[]) {
  int iparm_copy[IPARM_SIZE], mtype_copy, i;

  mtype_copy = *mtype;
  pardisoinit(pt, &mtype_copy, iparm_copy);
  for (i = 0; i < IPARM_SIZE; i++) iparm[i] = iparm_copy[i];
}
  #endif
#else
  #define INT_TYPE         int
  #define MKL_PARDISO      pardiso
  #define MKL_PARDISO_INIT pardisoinit
#endif
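/*
 * Sketch of how the JOB_* phases above are normally sequenced in a direct
 * MKL PARDISO call chain (arguments and error handling elided; see the
 * MKL_PARDISO() calls later in this file for the full argument list):
 *
 *   phase = JOB_ANALYSIS;                    reordering and symbolic factorization
 *   phase = JOB_NUMERICAL_FACTORIZATION;     numerical factorization
 *   phase = JOB_SOLVE_ITERATIVE_REFINEMENT;  solve, with iterative refinement
 *   phase = JOB_RELEASE_OF_ALL_MEMORY;       free all internal solver memory
 */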
/*
 * Internal data structure.
 * For more information check the mkl_pardiso manual.
 */
typedef struct {
  /* Configuration vector */
  INT_TYPE iparm[IPARM_SIZE];

  /*
   * Internal mkl_pardiso memory location.
   * After the first call to mkl_pardiso do not modify pt, as that could cause a serious memory leak.
   */
  void *pt[IPARM_SIZE];

  /* Basic mkl_pardiso info */
  INT_TYPE phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;

  /* Matrix structure */
  void     *a;
  INT_TYPE *ia, *ja;

  /* Number of non-zero elements */
  INT_TYPE nz;

  /* Row permutation vector */
  INT_TYPE *perm;

  /* Records whether the matrix preserves its sparse structure. */
  MatStructure matstruc;

  PetscBool needsym;
  PetscBool freeaij;

  /* Schur complement */
  PetscScalar *schur;
  PetscInt     schur_size;
  PetscInt    *schur_idxs;
  PetscScalar *schur_work;
  PetscBLASInt schur_work_size;
  PetscBool    solve_interior;

  /* True if mkl_pardiso functions have been used. */
  PetscBool CleanUp;

  /* Conversion to a format suitable for MKL */
  PetscErrorCode (*Convert)(Mat, PetscBool, MatReuse, PetscBool *, INT_TYPE *, INT_TYPE **, INT_TYPE **, PetscScalar **);
} Mat_MKL_PARDISO;
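/*
 * Note on indexing: the MKL manual numbers iparm entries starting from 1,
 * while C arrays are 0-based. Throughout this file the manual's numbering is
 * kept visible by writing, e.g., iparm[12 - 1] for the manual's iparm(12)
 * (the transpose/conjugate-transpose solve control).
 */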
PetscErrorCode MatMKLPardiso_Convert_seqsbaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v) {
  Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ *)A->data;
  PetscInt      bs = A->rmap->bs, i;

  PetscFunctionBegin;
  PetscCheck(sym, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "This should not happen");
  *v = aa->a;
  if (bs == 1) { /* already in the correct format */
    /* though PetscInt and INT_TYPE have the same size, they are defined differently, so the Intel compiler requires a cast */
    *r    = (INT_TYPE *)aa->i;
    *c    = (INT_TYPE *)aa->j;
    *nnz  = (INT_TYPE)aa->nz;
    *free = PETSC_FALSE;
  } else if (reuse == MAT_INITIAL_MATRIX) {
    PetscInt  m = A->rmap->n, nz = aa->nz;
    PetscInt *row, *col;
    PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
    for (i = 0; i < m + 1; i++) row[i] = aa->i[i] + 1;
    for (i = 0; i < nz; i++) col[i] = aa->j[i] + 1;
    *r    = (INT_TYPE *)row;
    *c    = (INT_TYPE *)col;
    *nnz  = (INT_TYPE)nz;
    *free = PETSC_TRUE;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatMKLPardiso_Convert_seqbaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v) {
  Mat_SeqBAIJ *aa = (Mat_SeqBAIJ *)A->data;
  PetscInt     bs = A->rmap->bs, i;

  PetscFunctionBegin;
  if (!sym) {
    *v = aa->a;
    if (bs == 1) { /* already in the correct format */
      /* though PetscInt and INT_TYPE have the same size, they are defined differently, so the Intel compiler requires a cast */
      *r    = (INT_TYPE *)aa->i;
      *c    = (INT_TYPE *)aa->j;
      *nnz  = (INT_TYPE)aa->nz;
      *free = PETSC_FALSE;
      PetscFunctionReturn(0);
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscInt  m = A->rmap->n, nz = aa->nz;
      PetscInt *row, *col;
      PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
      for (i = 0; i < m + 1; i++) row[i] = aa->i[i] + 1;
      for (i = 0; i < nz; i++) col[i] = aa->j[i] + 1;
      *r   = (INT_TYPE *)row;
      *c   = (INT_TYPE *)col;
      *nnz = (INT_TYPE)nz;
    }
    *free = PETSC_TRUE;
  } else {
    SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "This should not happen");
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatMKLPardiso_Convert_seqaij(Mat A, PetscBool sym, MatReuse reuse, PetscBool *free, INT_TYPE *nnz, INT_TYPE **r, INT_TYPE **c, PetscScalar **v) {
  Mat_SeqAIJ  *aa = (Mat_SeqAIJ *)A->data;
  PetscScalar *aav;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, (const PetscScalar **)&aav));
  if (!sym) { /* already in the correct format */
    *v    = aav;
    *r    = (INT_TYPE *)aa->i;
    *c    = (INT_TYPE *)aa->j;
    *nnz  = (INT_TYPE)aa->nz;
    *free = PETSC_FALSE;
  } else if (reuse == MAT_INITIAL_MATRIX) { /* need to get the triangular part */
    PetscScalar *vals, *vv;
    PetscInt    *row, *col, *jj;
    PetscInt     m = A->rmap->n, nz, i;

    nz = 0;
    for (i = 0; i < m; i++) nz += aa->i[i + 1] - aa->diag[i];
    PetscCall(PetscMalloc2(m + 1, &row, nz, &col));
    PetscCall(PetscMalloc1(nz, &vals));
    jj = col;
    vv = vals;

    row[0] = 0;
    for (i = 0; i < m; i++) {
      PetscInt    *aj = aa->j + aa->diag[i];
      PetscScalar *av = aav + aa->diag[i];
      PetscInt     rl = aa->i[i + 1] - aa->diag[i], j;

      for (j = 0; j < rl; j++) {
        *jj = *aj;
        jj++;
        aj++;
        *vv = *av;
        vv++;
        av++;
      }
      row[i + 1] = row[i] + rl;
    }
    *v    = vals;
    *r    = (INT_TYPE *)row;
    *c    = (INT_TYPE *)col;
    *nnz  = (INT_TYPE)nz;
    *free = PETSC_TRUE;
  } else {
    PetscScalar *vv;
    PetscInt     m = A->rmap->n, i;

    vv = *v;
    for (i = 0; i < m; i++) {
      PetscScalar *av = aav + aa->diag[i];
      PetscInt     rl = aa->i[i + 1] - aa->diag[i], j;
      for (j = 0; j < rl; j++) {
        *vv = *av;
        vv++;
        av++;
      }
    }
    *free = PETSC_TRUE;
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, (const PetscScalar **)&aav));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMKLPardisoSolveSchur_Private(Mat F, PetscScalar *B, PetscScalar *X) {
  Mat_MKL_PARDISO     *mpardiso = (Mat_MKL_PARDISO *)F->data;
  Mat                  S, Xmat, Bmat;
  MatFactorSchurStatus schurstatus;

  PetscFunctionBegin;
  PetscCall(MatFactorGetSchurComplement(F, &S, &schurstatus));
  PetscCheck(X != B || schurstatus != MAT_FACTOR_SCHUR_INVERTED, PETSC_COMM_SELF, PETSC_ERR_SUP, "X and B cannot point to the same address");
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, mpardiso->schur_size, mpardiso->nrhs, B, &Bmat));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, mpardiso->schur_size, mpardiso->nrhs, X, &Xmat));
  PetscCall(MatSetType(Bmat, ((PetscObject)S)->type_name));
  PetscCall(MatSetType(Xmat, ((PetscObject)S)->type_name));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
  PetscCall(MatBindToCPU(Xmat, S->boundtocpu));
  PetscCall(MatBindToCPU(Bmat, S->boundtocpu));
#endif

#if defined(PETSC_USE_COMPLEX)
  PetscCheck(mpardiso->iparm[12 - 1] != 1, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Hermitian solve not implemented yet");
#endif

  switch (schurstatus) {
  case MAT_FACTOR_SCHUR_FACTORED:
    if (!mpardiso->iparm[12 - 1]) {
      PetscCall(MatMatSolve(S, Bmat, Xmat));
    } else { /* transpose solve */
      PetscCall(MatMatSolveTranspose(S, Bmat, Xmat));
    }
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    PetscCall(MatProductCreateWithMat(S, Bmat, NULL, Xmat));
    if (!mpardiso->iparm[12 - 1]) {
      PetscCall(MatProductSetType(Xmat, MATPRODUCT_AB));
    } else { /* transpose solve */
      PetscCall(MatProductSetType(Xmat, MATPRODUCT_AtB));
    }
    PetscCall(MatProductSetFromOptions(Xmat));
    PetscCall(MatProductSymbolic(Xmat));
    PetscCall(MatProductNumeric(Xmat));
    PetscCall(MatProductClear(Xmat));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %" PetscInt_FMT, F->schur_status);
    break;
  }
  PetscCall(MatFactorRestoreSchurComplement(F, &S, schurstatus));
  PetscCall(MatDestroy(&Bmat));
  PetscCall(MatDestroy(&Xmat));
  PetscFunctionReturn(0);
}
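/*
 * The function below is composed on the factor as "MatFactorSetSchurIS_C".
 * A typical user-level workflow (a sketch; `is` holds the indices of the Schur
 * block and `info` is a MatFactorInfo initialized with MatFactorInfoInitialize()):
 *
 *   PetscCall(MatGetFactor(A, MATSOLVERMKL_PARDISO, MAT_FACTOR_LU, &F));
 *   PetscCall(MatFactorSetSchurIS(F, is));
 *   PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
 *   PetscCall(MatLUFactorNumeric(F, A, &info));
 *   PetscCall(MatFactorGetSchurComplement(F, &S, &status));
 */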
PetscErrorCode MatFactorSetSchurIS_MKL_PARDISO(Mat F, IS is) {
  Mat_MKL_PARDISO   *mpardiso = (Mat_MKL_PARDISO *)F->data;
  const PetscScalar *arr;
  const PetscInt    *idxs;
  PetscInt           size, i;
  PetscMPIInt        csize;
  PetscBool          sorted;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)F), &csize));
  PetscCheck(csize <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "MKL_PARDISO parallel Schur complements not yet supported from PETSc");
  PetscCall(ISSorted(is, &sorted));
  PetscCheck(sorted, PETSC_COMM_SELF, PETSC_ERR_SUP, "IS for MKL_PARDISO Schur complements needs to be sorted");
  PetscCall(ISGetLocalSize(is, &size));
  PetscCall(PetscFree(mpardiso->schur_work));
  PetscCall(PetscBLASIntCast(PetscMax(mpardiso->n, 2 * size), &mpardiso->schur_work_size));
  PetscCall(PetscMalloc1(mpardiso->schur_work_size, &mpardiso->schur_work));
  PetscCall(MatDestroy(&F->schur));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, size, size, NULL, &F->schur));
  PetscCall(MatDenseGetArrayRead(F->schur, &arr));
  mpardiso->schur      = (PetscScalar *)arr;
  mpardiso->schur_size = size;
  PetscCall(MatDenseRestoreArrayRead(F->schur, &arr));
  if (mpardiso->mtype == 2) PetscCall(MatSetOption(F->schur, MAT_SPD, PETSC_TRUE));

  PetscCall(PetscFree(mpardiso->schur_idxs));
  PetscCall(PetscMalloc1(size, &mpardiso->schur_idxs));
  PetscCall(PetscArrayzero(mpardiso->perm, mpardiso->n));
  PetscCall(ISGetIndices(is, &idxs));
  PetscCall(PetscArraycpy(mpardiso->schur_idxs, idxs, size));
  for (i = 0; i < size; i++) mpardiso->perm[idxs[i]] = 1;
  PetscCall(ISRestoreIndices(is, &idxs));
  if (size) { /* turn on Schur switch if the set of indices is not empty */
    mpardiso->iparm[36 - 1] = 2;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatDestroy_MKL_PARDISO(Mat A) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;

  PetscFunctionBegin;
  if (mat_mkl_pardiso->CleanUp) {
    mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;

    MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, NULL, NULL, NULL, NULL, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL,
                &mat_mkl_pardiso->err);
  }
  PetscCall(PetscFree(mat_mkl_pardiso->perm));
  PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
  PetscCall(PetscFree(mat_mkl_pardiso->schur_idxs));
  if (mat_mkl_pardiso->freeaij) {
    PetscCall(PetscFree2(mat_mkl_pardiso->ia, mat_mkl_pardiso->ja));
    if (mat_mkl_pardiso->iparm[34] == 1) PetscCall(PetscFree(mat_mkl_pardiso->a));
  }
  PetscCall(PetscFree(A->data));

  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorSetSchurIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMkl_PardisoSetCntl_C", NULL));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMKLPardisoScatterSchur_Private(Mat_MKL_PARDISO *mpardiso, PetscScalar *whole, PetscScalar *schur, PetscBool reduce) {
  PetscFunctionBegin;
  if (reduce) { /* data given for the whole matrix */
    PetscInt i, m = 0, p = 0;
    for (i = 0; i < mpardiso->nrhs; i++) {
      PetscInt j;
      for (j = 0; j < mpardiso->schur_size; j++) schur[p + j] = whole[m + mpardiso->schur_idxs[j]];
      m += mpardiso->n;
      p += mpardiso->schur_size;
    }
  } else { /* from Schur to whole */
    PetscInt i, m = 0, p = 0;
    for (i = 0; i < mpardiso->nrhs; i++) {
      PetscInt j;
      for (j = 0; j < mpardiso->schur_size; j++) whole[m + mpardiso->schur_idxs[j]] = schur[p + j];
      m += mpardiso->n;
      p += mpardiso->schur_size;
    }
  }
  PetscFunctionReturn(0);
}
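/*
 * Small illustration of the scatter above: with n = 5, schur_size = 2,
 * schur_idxs = {1, 3} and nrhs = 1, reduce == PETSC_TRUE gathers whole[1] and
 * whole[3] into schur[0..1], while reduce == PETSC_FALSE scatters schur[0..1]
 * back into whole[1] and whole[3]. For nrhs > 1 this repeats per column, with
 * strides n in whole and schur_size in schur.
 */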
PetscErrorCode MatSolve_MKL_PARDISO(Mat A, Vec b, Vec x) {
  Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscScalar       *xarray;
  const PetscScalar *barray;

  PetscFunctionBegin;
  mat_mkl_pardiso->nrhs = 1;
  PetscCall(VecGetArrayWrite(x, &xarray));
  PetscCall(VecGetArrayRead(b, &barray));

  if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
  else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

  if (barray == xarray) { /* if the two vectors share the same memory */
    PetscScalar *work;
    if (!mat_mkl_pardiso->schur_work) {
      PetscCall(PetscMalloc1(mat_mkl_pardiso->n, &work));
    } else {
      work = mat_mkl_pardiso->schur_work;
    }
    mat_mkl_pardiso->iparm[6 - 1] = 1;
    MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, NULL, &mat_mkl_pardiso->nrhs,
                mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)work, &mat_mkl_pardiso->err);
    if (!mat_mkl_pardiso->schur_work) PetscCall(PetscFree(work));
  } else {
    mat_mkl_pardiso->iparm[6 - 1] = 0;
    MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err);
  }
  PetscCall(VecRestoreArrayRead(b, &barray));

  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL_PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
  if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
    if (!mat_mkl_pardiso->solve_interior) {
      PetscInt shift = mat_mkl_pardiso->schur_size;

      PetscCall(MatFactorFactorizeSchurComplement(A));
      /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
      if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;

      /* solve Schur complement */
      PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work, PETSC_TRUE));
      PetscCall(MatMKLPardisoSolveSchur_Private(A, mat_mkl_pardiso->schur_work, mat_mkl_pardiso->schur_work + shift));
      PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work + shift, PETSC_FALSE));
    } else { /* if we are solving for the interior problem, any value in barray[schur] forward-substituted to xarray[schur] will be neglected */
      PetscInt i;
      for (i = 0; i < mat_mkl_pardiso->schur_size; i++) xarray[mat_mkl_pardiso->schur_idxs[i]] = 0.;
    }

    /* expansion phase */
    mat_mkl_pardiso->iparm[6 - 1] = 1;
    mat_mkl_pardiso->phase        = JOB_SOLVE_BACKWARD_SUBSTITUTION;
    MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
                &mat_mkl_pardiso->err);

    PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL_PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
    mat_mkl_pardiso->iparm[6 - 1] = 0;
  }
  PetscCall(VecRestoreArrayWrite(x, &xarray));
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(0);
}

PetscErrorCode MatSolveTranspose_MKL_PARDISO(Mat A, Vec b, Vec x) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscInt         oiparm12;

  PetscFunctionBegin;
  oiparm12                       = mat_mkl_pardiso->iparm[12 - 1];
  mat_mkl_pardiso->iparm[12 - 1] = 2;
  PetscCall(MatSolve_MKL_PARDISO(A, b, x));
  mat_mkl_pardiso->iparm[12 - 1] = oiparm12;
  PetscFunctionReturn(0);
}

PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A, Mat B, Mat X) {
  Mat_MKL_PARDISO   *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  const PetscScalar *barray;
  PetscScalar       *xarray;
  PetscBool          flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATSEQDENSE, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATSEQDENSE matrix");
  if (X != B) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)X, MATSEQDENSE, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATSEQDENSE matrix");
  }

  PetscCall(MatGetSize(B, NULL, (PetscInt *)&mat_mkl_pardiso->nrhs));

  if (mat_mkl_pardiso->nrhs > 0) {
    PetscCall(MatDenseGetArrayRead(B, &barray));
    PetscCall(MatDenseGetArrayWrite(X, &xarray));

    PetscCheck(barray != xarray, PETSC_COMM_SELF, PETSC_ERR_SUP, "B and X cannot share the same memory location");
    if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
    else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

    MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err);
    PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL_PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
    PetscCall(MatDenseRestoreArrayRead(B, &barray));
    if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
      PetscScalar *o_schur_work = NULL;

      /* solve Schur complement */
      if (!mat_mkl_pardiso->solve_interior) {
        PetscInt shift = mat_mkl_pardiso->schur_size * mat_mkl_pardiso->nrhs, scale;
        PetscInt mem   = mat_mkl_pardiso->n * mat_mkl_pardiso->nrhs;

        PetscCall(MatFactorFactorizeSchurComplement(A));
        /* allocate extra memory if it is needed */
        scale = 1;
        if (A->schur_status == MAT_FACTOR_SCHUR_INVERTED) scale = 2;
        mem *= scale;
        if (mem > mat_mkl_pardiso->schur_work_size) {
          o_schur_work = mat_mkl_pardiso->schur_work;
          PetscCall(PetscMalloc1(mem, &mat_mkl_pardiso->schur_work));
        }
        /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
        if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;
        PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work, PETSC_TRUE));
        PetscCall(MatMKLPardisoSolveSchur_Private(A, mat_mkl_pardiso->schur_work, mat_mkl_pardiso->schur_work + shift));
        PetscCall(MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso, xarray, mat_mkl_pardiso->schur_work + shift, PETSC_FALSE));
      } else { /* if we are solving for the interior problem, any value in barray[schur,n] forward-substituted to xarray[schur,n] will be neglected */
        PetscInt i, n, m = 0;
        for (n = 0; n < mat_mkl_pardiso->nrhs; n++) {
          for (i = 0; i < mat_mkl_pardiso->schur_size; i++) xarray[mat_mkl_pardiso->schur_idxs[i] + m] = 0.;
          m += mat_mkl_pardiso->n;
        }
      }

      /* expansion phase */
      mat_mkl_pardiso->iparm[6 - 1] = 1;
      mat_mkl_pardiso->phase        = JOB_SOLVE_BACKWARD_SUBSTITUTION;
      MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
                  &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
                  &mat_mkl_pardiso->err);
      if (o_schur_work) { /* restore original schur_work (minimal size) */
        PetscCall(PetscFree(mat_mkl_pardiso->schur_work));
        mat_mkl_pardiso->schur_work = o_schur_work;
      }
      PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL_PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);
      mat_mkl_pardiso->iparm[6 - 1] = 0;
    }
    PetscCall(MatDenseRestoreArrayWrite(X, &xarray));
  }
  mat_mkl_pardiso->CleanUp = PETSC_TRUE;
  PetscFunctionReturn(0);
}
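/*
 * Usage sketch for the routine above: B and X are MATSEQDENSE matrices with
 * one column per right-hand side, and F is the factored matrix:
 *
 *   PetscCall(MatMatSolve(F, B, X));
 */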
PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F, Mat A, const MatFactorInfo *info) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_REUSE_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a));

  mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION;
  MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
              &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, (void *)mat_mkl_pardiso->schur, &mat_mkl_pardiso->err);
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL_PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);

  /* report flops; iparm[18] returns the Mflops needed for the factorization, hence the 10^6 factor */
  if (mat_mkl_pardiso->iparm[18] > 0) PetscCall(PetscLogFlops(PetscPowRealInt(10., 6) * mat_mkl_pardiso->iparm[18]));

  if (F->schur) { /* Schur output from pardiso is in row-major format */
#if defined(PETSC_HAVE_CUDA)
    F->schur->offloadmask = PETSC_OFFLOAD_CPU;
#endif
    PetscCall(MatFactorRestoreSchurComplement(F, NULL, MAT_FACTOR_SCHUR_UNFACTORED));
    PetscCall(MatTranspose(F->schur, MAT_INPLACE_MATRIX, &F->schur));
  }
  mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
  mat_mkl_pardiso->CleanUp  = PETSC_TRUE;
  PetscFunctionReturn(0);
}
PetscErrorCode MatSetFromOptions_MKL_PARDISO(Mat F, Mat A) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;
  PetscInt         icntl, bs, threads = 1;
  PetscBool        flg;

  PetscFunctionBegin;
  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "MKL_PARDISO Options", "Mat");

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_65", "Suggested number of threads to use within PARDISO", "None", threads, &threads, &flg));
  if (flg) PetscSetMKL_PARDISOThreads((int)threads);

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_66", "Maximum number of factors with identical sparsity structure that must be kept in memory at the same time", "None", mat_mkl_pardiso->maxfct, &icntl, &flg));
  if (flg) mat_mkl_pardiso->maxfct = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_67", "Indicates the actual matrix for the solution phase", "None", mat_mkl_pardiso->mnum, &icntl, &flg));
  if (flg) mat_mkl_pardiso->mnum = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_68", "Message level information", "None", mat_mkl_pardiso->msglvl, &icntl, &flg));
  if (flg) mat_mkl_pardiso->msglvl = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_69", "Defines the matrix type", "None", mat_mkl_pardiso->mtype, &icntl, &flg));
  if (flg) {
    void *pt[IPARM_SIZE];
    mat_mkl_pardiso->mtype = icntl;
    icntl                  = mat_mkl_pardiso->iparm[34];
    bs                     = mat_mkl_pardiso->iparm[36];
    MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
    mat_mkl_pardiso->iparm[27] = 1;
#else
    mat_mkl_pardiso->iparm[27] = 0;
#endif
    mat_mkl_pardiso->iparm[34] = icntl;
    mat_mkl_pardiso->iparm[36] = bs;
  }

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_1", "Use default values (if 0)", "None", mat_mkl_pardiso->iparm[0], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[0] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_2", "Fill-in reducing ordering for the input matrix", "None", mat_mkl_pardiso->iparm[1], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[1] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_4", "Preconditioned CGS/CG", "None", mat_mkl_pardiso->iparm[3], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[3] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_5", "User permutation", "None", mat_mkl_pardiso->iparm[4], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[4] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_6", "Write solution into x", "None", mat_mkl_pardiso->iparm[5], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[5] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_8", "Iterative refinement steps", "None", mat_mkl_pardiso->iparm[7], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[7] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_10", "Pivoting perturbation", "None", mat_mkl_pardiso->iparm[9], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[9] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_11", "Scaling vectors", "None", mat_mkl_pardiso->iparm[10], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[10] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_12", "Solve with transposed or conjugate transposed matrix A", "None", mat_mkl_pardiso->iparm[11], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[11] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_13", "Improved accuracy using (non-)symmetric weighted matching", "None", mat_mkl_pardiso->iparm[12], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[12] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_18", "Number of non-zero elements", "None", mat_mkl_pardiso->iparm[17], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[17] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_19", "Report number of floating point operations (0 to disable)", "None", mat_mkl_pardiso->iparm[18], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[18] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_21", "Pivoting for symmetric indefinite matrices", "None", mat_mkl_pardiso->iparm[20], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[20] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_24", "Parallel factorization control", "None", mat_mkl_pardiso->iparm[23], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[23] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_25", "Parallel forward/backward solve control", "None", mat_mkl_pardiso->iparm[24], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[24] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_27", "Matrix checker", "None", mat_mkl_pardiso->iparm[26], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[26] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_31", "Partial solve and computing selected components of the solution vectors", "None", mat_mkl_pardiso->iparm[30], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[30] = icntl;
  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_34", "Optimal number of threads for conditional numerical reproducibility (CNR) mode", "None", mat_mkl_pardiso->iparm[33], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[33] = icntl;

  PetscCall(PetscOptionsInt("-mat_mkl_pardiso_60", "Intel MKL_PARDISO mode", "None", mat_mkl_pardiso->iparm[59], &icntl, &flg));
  if (flg) mat_mkl_pardiso->iparm[59] = icntl;
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
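/*
 * Command-line example for the options above (a sketch): running with
 *   -mat_mkl_pardiso_65 4 -mat_mkl_pardiso_68 1
 * suggests 4 threads to MKL_PARDISO and turns on its message level output.
 */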
PetscErrorCode MatFactorMKL_PARDISOInitialize_Private(Mat A, MatFactorType ftype, Mat_MKL_PARDISO *mat_mkl_pardiso) {
  PetscInt  i, bs;
  PetscBool match;

  PetscFunctionBegin;
  for (i = 0; i < IPARM_SIZE; i++) mat_mkl_pardiso->iparm[i] = 0;
  for (i = 0; i < IPARM_SIZE; i++) mat_mkl_pardiso->pt[i] = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  mat_mkl_pardiso->iparm[27] = 1;
#else
  mat_mkl_pardiso->iparm[27] = 0;
#endif
  /* Default options for both sym and unsym */
  mat_mkl_pardiso->iparm[0]  = 1;  /* Solver default parameters overridden with those provided in iparm */
  mat_mkl_pardiso->iparm[1]  = 2;  /* Metis reordering */
  mat_mkl_pardiso->iparm[5]  = 0;  /* Write solution into x */
  mat_mkl_pardiso->iparm[7]  = 0;  /* Max number of iterative refinement steps */
  mat_mkl_pardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
  mat_mkl_pardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
#if 0
  mat_mkl_pardiso->iparm[23] = 1; /* Parallel factorization control */
#endif
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &match, MATSEQBAIJ, MATSEQSBAIJ, ""));
  PetscCall(MatGetBlockSize(A, &bs));
  if (!match || bs == 1) {
    mat_mkl_pardiso->iparm[34] = 1; /* Cluster Sparse Solver uses C-style indexing for ia and ja arrays */
    mat_mkl_pardiso->n         = A->rmap->N;
  } else {
    mat_mkl_pardiso->iparm[34] = 0; /* Cluster Sparse Solver uses Fortran-style indexing for ia and ja arrays */
    mat_mkl_pardiso->iparm[36] = bs;
    mat_mkl_pardiso->n         = A->rmap->N / bs;
  }
  mat_mkl_pardiso->iparm[39] = 0; /* Input: matrix/rhs/solution stored on rank 0 */

  mat_mkl_pardiso->CleanUp = PETSC_FALSE;
  mat_mkl_pardiso->maxfct  = 1; /* Maximum number of numerical factorizations */
  mat_mkl_pardiso->mnum    = 1; /* Which factorization to use */
  mat_mkl_pardiso->msglvl  = 0; /* 0: do not print; 1: print statistical information to file */
  mat_mkl_pardiso->phase   = -1;
  mat_mkl_pardiso->err     = 0;
  mat_mkl_pardiso->nrhs    = 1;

  if (ftype == MAT_FACTOR_LU) {
    mat_mkl_pardiso->iparm[9]  = 13; /* Perturb the pivot elements with 1E-13 */
    mat_mkl_pardiso->iparm[10] = 1;  /* Use nonsymmetric permutation and scaling MPS */
    mat_mkl_pardiso->iparm[12] = 1;  /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
  } else {
    mat_mkl_pardiso->iparm[9]  = 8; /* Perturb the pivot elements with 1E-8 */
    mat_mkl_pardiso->iparm[10] = 0; /* Use nonsymmetric permutation and scaling MPS */
    mat_mkl_pardiso->iparm[12] = 1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
#if defined(PETSC_USE_DEBUG)
    mat_mkl_pardiso->iparm[26] = 1; /* Matrix checker */
#endif
  }
  PetscCall(PetscCalloc1(A->rmap->N, &mat_mkl_pardiso->perm));
  mat_mkl_pardiso->schur_size = 0;
  PetscFunctionReturn(0);
}
PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F, Mat A, const MatFactorInfo *info) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  mat_mkl_pardiso->matstruc = DIFFERENT_NONZERO_PATTERN;
  PetscCall(MatSetFromOptions_MKL_PARDISO(F, A));
  /* throw away any previously computed structure */
  if (mat_mkl_pardiso->freeaij) {
    PetscCall(PetscFree2(mat_mkl_pardiso->ia, mat_mkl_pardiso->ja));
    if (mat_mkl_pardiso->iparm[34] == 1) PetscCall(PetscFree(mat_mkl_pardiso->a));
  }
  PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_INITIAL_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a));
  if (mat_mkl_pardiso->iparm[34] == 1) mat_mkl_pardiso->n = A->rmap->N;
  else mat_mkl_pardiso->n = A->rmap->N / A->rmap->bs;

  mat_mkl_pardiso->phase = JOB_ANALYSIS;

  /* reset flops counting if requested */
  if (mat_mkl_pardiso->iparm[18]) mat_mkl_pardiso->iparm[18] = -1;

  MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm,
              &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err);
  PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL_PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err);

  mat_mkl_pardiso->CleanUp = PETSC_TRUE;

  if (F->factortype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_MKL_PARDISO;
  else F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_PARDISO;

  F->ops->solve          = MatSolve_MKL_PARDISO;
  F->ops->solvetranspose = MatSolveTranspose_MKL_PARDISO;
  F->ops->matsolve       = MatMatSolve_MKL_PARDISO;
  PetscFunctionReturn(0);
}

PetscErrorCode MatLUFactorSymbolic_AIJMKL_PARDISO(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info) {
  PetscFunctionBegin;
  PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
  PetscFunctionReturn(0);
}

#if !defined(PETSC_USE_COMPLEX)
PetscErrorCode MatGetInertia_MKL_PARDISO(Mat F, PetscInt *nneg, PetscInt *nzero, PetscInt *npos) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  if (nneg) *nneg = mat_mkl_pardiso->iparm[22];
  if (npos) *npos = mat_mkl_pardiso->iparm[21];
  if (nzero) *nzero = F->rmap->N - (mat_mkl_pardiso->iparm[22] + mat_mkl_pardiso->iparm[21]);
  PetscFunctionReturn(0);
}
#endif

PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_PARDISO(Mat F, Mat A, IS r, const MatFactorInfo *info) {
  PetscFunctionBegin;
  PetscCall(MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info));
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL;
#else
  F->ops->getinertia = MatGetInertia_MKL_PARDISO;
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatView_MKL_PARDISO(Mat A, PetscViewer viewer) {
  PetscBool         iascii;
  PetscViewerFormat format;
  Mat_MKL_PARDISO  *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;
  PetscInt          i;

  PetscFunctionBegin;
  if (A->ops->solve != MatSolve_MKL_PARDISO) PetscFunctionReturn(0);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO phase: %d \n", mat_mkl_pardiso->phase));
      for (i = 1; i <= 64; i++) PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO iparm[%d]: %d \n", i, mat_mkl_pardiso->iparm[i - 1]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO maxfct: %d \n", mat_mkl_pardiso->maxfct));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO mnum: %d \n", mat_mkl_pardiso->mnum));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO mtype: %d \n", mat_mkl_pardiso->mtype));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO n: %d \n", mat_mkl_pardiso->n));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO nrhs: %d \n", mat_mkl_pardiso->nrhs));
      PetscCall(PetscViewerASCIIPrintf(viewer, "MKL_PARDISO msglvl: %d \n", mat_mkl_pardiso->msglvl));
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatGetInfo_MKL_PARDISO(Mat A, MatInfoType flag, MatInfo *info) {
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_used           = mat_mkl_pardiso->iparm[17];
  info->nz_allocated      = mat_mkl_pardiso->iparm[17];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}
PetscErrorCode MatMkl_PardisoSetCntl_MKL_PARDISO(Mat F, PetscInt icntl, PetscInt ival) {
  PetscInt         backup, bs;
  Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO *)F->data;

  PetscFunctionBegin;
  if (icntl <= 64) {
    mat_mkl_pardiso->iparm[icntl - 1] = ival;
  } else {
    if (icntl == 65) PetscSetMKL_PARDISOThreads(ival);
    else if (icntl == 66) mat_mkl_pardiso->maxfct = ival;
    else if (icntl == 67) mat_mkl_pardiso->mnum = ival;
    else if (icntl == 68) mat_mkl_pardiso->msglvl = ival;
    else if (icntl == 69) {
      void *pt[IPARM_SIZE];
      backup                 = mat_mkl_pardiso->iparm[34];
      bs                     = mat_mkl_pardiso->iparm[36];
      mat_mkl_pardiso->mtype = ival;
      MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
#if defined(PETSC_USE_REAL_SINGLE)
      mat_mkl_pardiso->iparm[27] = 1;
#else
      mat_mkl_pardiso->iparm[27] = 0;
#endif
      mat_mkl_pardiso->iparm[34] = backup;
      mat_mkl_pardiso->iparm[36] = bs;
    } else if (icntl == 70) mat_mkl_pardiso->solve_interior = (PetscBool)!!ival;
  }
  PetscFunctionReturn(0);
}

/*@
  MatMkl_PardisoSetCntl - Set Mkl_Pardiso parameters

  Logically Collective on Mat

  Input Parameters:
+ F - the factored matrix obtained by calling MatGetFactor()
. icntl - index of Mkl_Pardiso parameter
- ival - value of Mkl_Pardiso parameter

  Options Database:
. -mat_mkl_pardiso_<icntl> <ival> - change the option numbered icntl to the value ival

  Level: beginner

  References:
. * - Mkl_Pardiso Users' Guide

.seealso: `MatGetFactor()`
@*/
PetscErrorCode MatMkl_PardisoSetCntl(Mat F, PetscInt icntl, PetscInt ival) {
  PetscFunctionBegin;
  PetscTryMethod(F, "MatMkl_PardisoSetCntl_C", (Mat, PetscInt, PetscInt), (F, icntl, ival));
  PetscFunctionReturn(0);
}
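/*
 * Usage sketch (hypothetical values; F obtained from MatGetFactor() with
 * MATSOLVERMKL_PARDISO):
 *
 *   PetscCall(MatMkl_PardisoSetCntl(F, 65, 4));   icntl 65: suggested thread count
 *   PetscCall(MatMkl_PardisoSetCntl(F, 68, 1));   icntl 68: message level
 */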
/*MC
  MATSOLVERMKL_PARDISO - A matrix type providing direct solvers (LU) for
  sequential matrices via the external package MKL_PARDISO.

  Works with MATSEQAIJ matrices; LU is also registered for MATSEQBAIJ and Cholesky for MATSEQSBAIJ (see MatSolverTypeRegister_MKL_Pardiso() below)

  Use -pc_type lu -pc_factor_mat_solver_type mkl_pardiso to use this direct solver

  Options Database Keys:
+ -mat_mkl_pardiso_65 - Suggested number of threads to use within MKL_PARDISO
. -mat_mkl_pardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
. -mat_mkl_pardiso_67 - Indicates the actual matrix for the solution phase
. -mat_mkl_pardiso_68 - Message level information, use 1 to get detailed information on the solver options
. -mat_mkl_pardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, iparm parameters are going to be set to the default ones for the matrix type
. -mat_mkl_pardiso_1 - Use default values
. -mat_mkl_pardiso_2 - Fill-in reducing ordering for the input matrix
. -mat_mkl_pardiso_4 - Preconditioned CGS/CG
. -mat_mkl_pardiso_5 - User permutation
. -mat_mkl_pardiso_6 - Write solution into x
. -mat_mkl_pardiso_8 - Iterative refinement steps
. -mat_mkl_pardiso_10 - Pivoting perturbation
. -mat_mkl_pardiso_11 - Scaling vectors
. -mat_mkl_pardiso_12 - Solve with transposed or conjugate transposed matrix A
. -mat_mkl_pardiso_13 - Improved accuracy using (non-)symmetric weighted matching
. -mat_mkl_pardiso_18 - Number of non-zero elements
. -mat_mkl_pardiso_19 - Report number of floating point operations
. -mat_mkl_pardiso_21 - Pivoting for symmetric indefinite matrices
. -mat_mkl_pardiso_24 - Parallel factorization control
. -mat_mkl_pardiso_25 - Parallel forward/backward solve control
. -mat_mkl_pardiso_27 - Matrix checker
. -mat_mkl_pardiso_31 - Partial solve and computing selected components of the solution vectors
. -mat_mkl_pardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
- -mat_mkl_pardiso_60 - Intel MKL_PARDISO mode

  Level: beginner

  Notes:
  Use -mat_mkl_pardiso_68 1 to display the number of threads the solver is using. MKL does not provide a way to directly access this information.

  For more information on the options check the MKL_Pardiso manual

.seealso: `PCFactorSetMatSolverType()`, `MatSolverType`

M*/
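/*
 * Selecting this solver from application code (a sketch; pc is the PC of a KSP):
 *
 *   PetscCall(PCSetType(pc, PCLU));
 *   PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERMKL_PARDISO));
 *
 * which is equivalent to the command-line options mentioned above:
 *   -pc_type lu -pc_factor_mat_solver_type mkl_pardiso
 */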
static PetscErrorCode MatFactorGetSolverType_mkl_pardiso(Mat A, MatSolverType *type) {
  PetscFunctionBegin;
  *type = MATSOLVERMKL_PARDISO;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A, MatFactorType ftype, Mat *F) {
  Mat              B;
  Mat_MKL_PARDISO *mat_mkl_pardiso;
  PetscBool        isSeqAIJ, isSeqBAIJ, isSeqSBAIJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQBAIJ, &isSeqBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("mkl_pardiso", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  PetscCall(PetscNewLog(B, &mat_mkl_pardiso));
  B->data = mat_mkl_pardiso;

  PetscCall(MatFactorMKL_PARDISOInitialize_Private(A, ftype, mat_mkl_pardiso));
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_PARDISO;
    B->factortype            = MAT_FACTOR_LU;
    mat_mkl_pardiso->needsym = PETSC_FALSE;
    if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
    else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
    else {
      PetscCheck(!isSeqSBAIJ, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO LU factor with SEQSBAIJ format! Use MAT_FACTOR_CHOLESKY instead");
      SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO LU with %s format", ((PetscObject)A)->type_name);
    }
#if defined(PETSC_USE_COMPLEX)
    mat_mkl_pardiso->mtype = 13;
#else
    mat_mkl_pardiso->mtype = 11;
#endif
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_PARDISO;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
    else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
    else if (isSeqSBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqsbaij;
    else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO CHOLESKY with %s format", ((PetscObject)A)->type_name);

    mat_mkl_pardiso->needsym = PETSC_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    if (A->spd == PETSC_BOOL3_TRUE) mat_mkl_pardiso->mtype = 2;
    else mat_mkl_pardiso->mtype = -2;
#else
    mat_mkl_pardiso->mtype = 6;
    PetscCheck(A->hermitian != PETSC_BOOL3_TRUE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for PARDISO CHOLESKY with Hermitian matrices! Use MAT_FACTOR_LU instead");
#endif
  }
  B->ops->destroy = MatDestroy_MKL_PARDISO;
  B->ops->view    = MatView_MKL_PARDISO;
  B->ops->getinfo = MatGetInfo_MKL_PARDISO;
  B->factortype   = ftype;
  B->assembled    = PETSC_TRUE;

  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERMKL_PARDISO, &B->solvertype));

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_mkl_pardiso));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorSetSchurIS_C", MatFactorSetSchurIS_MKL_PARDISO));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMkl_PardisoSetCntl_C", MatMkl_PardisoSetCntl_MKL_PARDISO));

  *F = B;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void) {
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQBAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso));
  PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_mkl_pardiso));
  PetscFunctionReturn(0);
}