/*
    Provides an interface to the MUMPS sparse solver
*/

#include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#include <cmumps_c.h>
#else
#include <zmumps_c.h>
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
#include <smumps_c.h>
#else
#include <dmumps_c.h>
#endif
#endif
EXTERN_C_END
#define JOB_INIT -1
#define JOB_FACTSYMBOLIC 1
#define JOB_FACTNUMERIC 2
#define JOB_SOLVE 3
#define JOB_END -2

/* calls to MUMPS */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define PetscMUMPS_c cmumps_c
#else
#define PetscMUMPS_c zmumps_c
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
#define PetscMUMPS_c smumps_c
#else
#define PetscMUMPS_c dmumps_c
#endif
#endif

/* declare MumpsScalar */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define MumpsScalar mumps_complex
#else
#define MumpsScalar mumps_double_complex
#endif
#else
#define MumpsScalar PetscScalar
#endif

/* macros s.t. indices match MUMPS documentation */
#define ICNTL(I) icntl[(I)-1]
#define CNTL(I) cntl[(I)-1]
#define INFOG(I) infog[(I)-1]
#define INFO(I) info[(I)-1]
#define RINFOG(I) rinfog[(I)-1]
#define RINFO(I) rinfo[(I)-1]

typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;
#else
  ZMUMPS_STRUC_C id;
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;
#else
  DMUMPS_STRUC_C id;
#endif
#endif

  MatStructure matstruc;
  PetscMPIInt  myid,size;
  PetscInt     *irn,*jcn,nz,sym;
  PetscScalar  *val;
  MPI_Comm     comm_mumps;
  PetscBool    isAIJ,CleanUpMUMPS;
  PetscInt     ICNTL9_pre;         /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol; /* used by MatSolve() */
  Vec          b_seq,x_seq;
  PetscInt     ninfo,*info;        /* display INFO */

  PetscErrorCode (*Destroy)(Mat);
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);
} Mat_MUMPS;

extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);

/*
  MatConvertToTriples_A_B - convert PETSc matrix to triples: row[nz], col[nz], val[nz]

  input:
    A       - matrix in aij, baij or sbaij (bs=1) format
    shift   - 0: C style output triple; 1: Fortran style output triple.
    reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
              MAT_REUSE_MATRIX:   only the values in the v array are updated
  output:
    nnz     - dim of r, c, and v (number of local nonzero entries of A)
    r, c, v - row and col index, matrix values (matrix triples)

  The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() they are
  freed with PetscFree(mumps->irn). This is not ideal code; the fact that v is only sometimes part of mumps->irn means
  that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
*/
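/*
   Illustrative sketch (the matrix values below are invented for this comment, not taken
   from any test): for the 2x2 AIJ matrix
        [ 2  -1 ]
        [ 0   3 ]
   with nonzeros (0,0)=2, (0,1)=-1, (1,1)=3, a converter called with shift=1
   (Fortran-style indexing, as used when the triples are handed to MUMPS) produces
        nnz = 3
        r   = {1, 1, 2}
        c   = {1, 2, 2}
        v   = {2, -1, 3}
   i.e. a row-by-row COO listing of the local entries.
*/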
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
  PetscInt       nz,rnz,i,j;
  PetscErrorCode ierr;
  PetscInt       *row,*col;
  Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;

  PetscFunctionBegin;
  *v = aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz;
    ai   = aa->i;
    aj   = aa->j;
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - ai[i];
      ajj = aj + ai[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz++] = ajj[j] + shift;
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
  const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
  PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
  PetscErrorCode ierr;
  PetscInt       *row,*col;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  M    = A->rmap->N/bs;
  *v   = aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    ai   = aa->i; aj = aa->j;
    nz   = bs2*aa->nz;
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    for (i=0; i<M; i++) {
      ajj = aj + ai[i];
      rnz = ai[i+1] - ai[i];
      for (k=0; k<rnz; k++) {
        for (j=0; j<bs; j++) {
          for (m=0; m<bs; m++) {
            row[idx]   = i*bs + m + shift;
            col[idx++] = bs*(ajj[k]) + j + shift;
          }
        }
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
  PetscInt       nz,rnz,i,j;
  PetscErrorCode ierr;
  PetscInt       *row,*col;
  Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;

  PetscFunctionBegin;
  *v = aa->a;
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz;
    ai   = aa->i;
    aj   = aa->j;
    *v   = aa->a;
    *nnz = nz;
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - ai[i];
      ajj = aj + ai[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz++] = ajj[j] + shift;
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
  PetscInt          nz,rnz,i,j;
  const PetscScalar *av,*v1;
  PetscScalar       *val;
  PetscErrorCode    ierr;
  PetscInt          *row,*col;
  Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;

  PetscFunctionBegin;
  ai    = aa->i; aj = aa->j; av = aa->a;
  adiag = aa->diag;
  if (reuse == MAT_INITIAL_MATRIX) {
    /* count nz in the upper triangular part of A */
    nz = 0;
    for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
    *nnz = nz;

    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - adiag[i];
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
      }
    }
    *r = row; *c = col; *v = val;
  } else {
    nz = 0; val = *v;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - adiag[i];
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        val[nz++] = v1[j];
      }
    }
  }
  PetscFunctionReturn(0);
}
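/*
   Illustrative sketch (values invented for this comment): for the symmetric 3x3 AIJ matrix
        [ 4  1  0 ]
        [ 1  5  2 ]
        [ 0  2  6 ]
   only the diagonal and strictly upper entries of each row are kept when a symmetric
   (sbaij-style) triple is requested, so with shift=1 the converter above yields
        r = {1,1, 2,2, 3}
        c = {1,2, 2,3, 3}
        v = {4,1, 5,2, 6}
   since MUMPS expects only one triangle of a symmetric matrix.
*/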
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai = aa->i; aj = aa->j; bi = bb->i; bj = bb->j; rstart = A->rmap->rstart;
  av = aa->a; bv = bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i]; /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai = aa->i; aj = aa->j; bi = bb->i; bj = bb->j; rstart = A->rmap->rstart;
  av = aa->a; bv = bb->a;

  garray = mat->garray;

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i]; /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa  = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz);
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];
    v2     = bv + bs2*bi[i];

    idx = 0;
    /* A-part */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai = aa->i; aj = aa->j; adiag = aa->diag;
  bi = bb->i; bj = bb->j; garray = mat->garray;
  av = aa->a; bv = bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0; /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0; /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i]; /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_MUMPS"
PetscErrorCode MatGetDiagonal_MUMPS(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: MUMPS factor");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
    ierr = PetscFree(mumps->irn);CHKERRQ(ierr);
    ierr = PetscFree(mumps->info);CHKERRQ(ierr);

    mumps->id.job = JOB_END;
    PetscMUMPS_c(&mumps->id);
    ierr = MPI_Comm_free(&(mumps->comm_mumps));CHKERRQ(ierr);
  }
  if (mumps->Destroy) {
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatSolve_MUMPS"
PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
  PetscScalar      *array;
  Vec              b_seq;
  IS               is_iden,is_petsc;
  PetscErrorCode   ierr;
  PetscInt         i;
  static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{MUMPS01,\n author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n journal = {SIAM Journal on Matrix Analysis and Applications},\n volume = {23},\n number = {1},\n pages = {15--41},\n year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
  ierr = PetscCitationsRegister("@article{MUMPS02,\n author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n title = {Hybrid scheduling for the parallel solution of linear systems},\n journal = {Parallel Computing},\n volume = {32},\n number = {2},\n pages = {136--156},\n year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
  mumps->id.nrhs = 1;
  b_seq          = mumps->b_seq;
  if (mumps->size > 1) {
    /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
    ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
  } else { /* size == 1 */
    ierr = VecCopy(b,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
  }
  if (!mumps->myid) { /* define rhs on the host */
    mumps->id.nrhs = 1;
    mumps->id.rhs  = (MumpsScalar*)array;
  }

  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
    if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
      /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size), so recreate scat_sol */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    }
    if (!mumps->scat_sol) { /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<mumps->id.lsol_loc; i++) {
        mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr); /* to */
      ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);

      mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
    }

    ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolveTranspose_MUMPS"
PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0;
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1;
  PetscFunctionReturn(0);
}
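/*
   Usage sketch (the executable name is illustrative; the options and routines are standard
   PETSc): this solver is normally selected at runtime through the factorization
   preconditioners, e.g.

       ./myapp -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package mumps

   or, for symmetric problems, -pc_type cholesky. MatSolve_MUMPS() and
   MatSolveTranspose_MUMPS() above are then invoked by KSPSolve() through the factored matrix.
*/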
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_MUMPS"
PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M;

  if (mumps->size == 1) {
    /* copy B to X */
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    for (i=0; i<M*nrhs; i++) array[i] = bray[i];
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array;

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
  } else { /*--------- parallel case --------*/
    PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
    MumpsScalar    *sol_loc,*sol_loc_save;
    IS             is_to,is_from;
    PetscInt       k,proc,j,m;
    const PetscInt *rstart;
    Vec            v_mpi,b_seq,x_seq;
    VecScatter     scat_rhs,scat_sol;

    /* create x_seq to hold local solution */
    isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
    sol_loc_save  = mumps->id.sol_loc;

    lsol_loc  = mumps->id.INFO(23);
    nlsol_loc = nrhs*lsol_loc; /* length of sol_loc */
    ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr);
    mumps->id.sol_loc  = (MumpsScalar*)sol_loc;
    mumps->id.isol_loc = isol_loc;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr);

    /* copy rhs matrix B into vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
    /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
       iidx: inverse of idx, will be used by scattering xx_seq -> X */
    ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
    k    = 0;
    for (proc=0; proc<mumps->size; proc++){
      for (j=0; j<nrhs; j++){
        for (i=rstart[proc]; i<rstart[proc+1]; i++){
          iidx[j*M + i] = k;
          idx[k++]      = j*M + i;
        }
      }
    }

    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

    /* create scatter scat_sol */
    ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
    for (i=0; i<lsol_loc; i++) {
      isol_loc[i] -= 1; /* change Fortran style to C style */
      idxx[i] = iidx[isol_loc[i]];
      for (j=1; j<nrhs; j++){
        idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M];
      }
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
    ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

    /* free spaces */
    mumps->id.sol_loc  = sol_loc_save;
    mumps->id.isol_loc = isol_loc_save;

    ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
    ierr = PetscFree2(idx,iidx);CHKERRQ(ierr);
    ierr = PetscFree(idxx);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
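/*
   Usage sketch for the routine above (F, B and X are illustrative names): with a MUMPS
   factor F and a dense multiple right-hand side B, all columns are solved in one call,

       ierr = MatMatSolve(F,B,X);     (B and X must be MATDENSE with matching row layout)

   which is cheaper than one MatSolve() per column because MUMPS processes the nrhs
   columns in a single solve phase (id.nrhs is set above).
*/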
#if !defined(PETSC_USE_COMPLEX)
/*
  input:
   F:     numeric factor
  output:
   nneg:  total number of negative pivots
   nzero: 0
   npos:  (global dimension of F) - nneg
*/

#undef __FUNCT__
#define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
  /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
  if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.ICNTL(13));

  if (nneg) *nneg = mumps->id.INFOG(12);
  if (nzero || npos) {
    if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
    if (nzero) *nzero = mumps->id.INFOG(28);
    if (npos) *npos = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
  }
  PetscFunctionReturn(0);
}
#endif /* !defined(PETSC_USE_COMPLEX) */
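/*
   Usage sketch (F, nneg, nzero, npos are illustrative names): after a Cholesky factorization,
   with -mat_mumps_icntl_13 1 in parallel runs and -mat_mumps_icntl_24 1 if zero pivots are
   to be counted, the inertia can be queried as

       PetscInt nneg,nzero,npos;
       ierr = MatGetInertia(F,&nneg,&nzero,&npos);

   which dispatches to MatGetInertia_SBAIJMUMPS() above. This is only available in real
   arithmetic.
*/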
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* A is centralized */
    if (!mumps->myid) {
      mumps->id.a = (MumpsScalar*)mumps->val;
    }
  } else {
    mumps->id.a_loc = (MumpsScalar*)mumps->val;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    if (mumps->id.INFO(1) == -13) {
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB," mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  (F)->assembled      = PETSC_TRUE;
  mumps->matstruc     = SAME_NONZERO_PATTERN;
  mumps->CleanUpMUMPS = PETSC_TRUE;

  if (mumps->size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc  = (MumpsScalar*)sol_loc;
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
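/*
   Note (illustrative, not an exhaustive recipe): the INFO(1) = -13 branch above is the
   out-of-memory failure of the numerical factorization. A common first remedy is to raise
   the workspace margin, e.g.

       -mat_mumps_icntl_14 50

   (ICNTL(14) is the percentage increase in the estimated working space, handled by
   PetscSetMUMPSFromOptions() below), or to cap the per-process memory with ICNTL(23).
*/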
/* Sets MUMPS options from the options database */
#undef __FUNCT__
#define __FUNCT__ "PetscSetMUMPSFromOptions"
PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  PetscInt       icntl,info[40],i,ninfo=40;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(1) = icntl;
  ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(2) = icntl;
  ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(3) = icntl;

  ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(4) = icntl;
  if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */

  ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scales the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(6) = icntl;

  ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
  if (flg) {
    if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
    else mumps->id.ICNTL(7) = icntl;
  }

  ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr);  handled by MatSolveTranspose_MUMPS() */
  ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr);  -- sparse rhs is not supported in PETSc API */
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr);  we only use distributed solution vector */

  ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that MUMPS can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
  if (mumps->id.ICNTL(24)) {
    mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
  }

  ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix is present","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elimination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
  ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);

  ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);

  ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
  if (ninfo) {
    if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must be <= 40\n",ninfo);
    ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
    mumps->ninfo = ninfo;
    for (i=0; i<ninfo; i++) {
      if (info[i] < 0 || info[i]>40) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must be between 1 and 40\n",info[i]);
      } else {
        mumps->info[i] = info[i];
      }
    }
  }

  PetscOptionsEnd();
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscInitializeMUMPS"
PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);

  mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);

  mumps->id.job = JOB_INIT;
  mumps->id.par = 1;  /* host participates in factorization and solve */
  mumps->id.sym = mumps->sym;
  PetscMUMPS_c(&mumps->id);

  mumps->CleanUpMUMPS = PETSC_FALSE;
  mumps->scat_rhs     = NULL;
  mumps->scat_sol     = NULL;

  /* set PETSc-MUMPS default options - override MUMPS default */
  mumps->id.ICNTL(3) = 0;
  mumps->id.ICNTL(4) = 0;
  if (mumps->size == 1) {
    mumps->id.ICNTL(18) = 0; /* centralized assembled matrix input */
  } else {
    mumps->id.ICNTL(18) = 3; /* distributed assembled matrix input */
    mumps->id.ICNTL(20) = 0; /* rhs is in dense format */
    mumps->id.ICNTL(21) = 1; /* distributed solution */
  }
  PetscFunctionReturn(0);
}
/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0: /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz = mumps->nz; mumps->id.irn = mumps->irn; mumps->id.jcn = mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provided matrix ordering - assuming r = c ordering */
        /*
        PetscBool flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
        */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3: /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc  = mumps->nz;
    mumps->id.irn_loc = mumps->irn; mumps->id.jcn_loc = mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = MatMatSolve_MUMPS;
  PetscFunctionReturn(0);
}
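/*
   Usage sketch for the LU path above (A, F, isrow, iscol, info, b and x are illustrative
   names; the same sequence is normally driven by PCLU rather than called directly):

       ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);
       ierr = MatLUFactorSymbolic(F,A,isrow,iscol,&info);      analysis phase (this routine)
       ierr = MatLUFactorNumeric(F,A,&info);                   numerical factorization
       ierr = MatSolve(F,b,x);

   MatMumpsSetIcntl()/MatMumpsSetCntl() may be called on F between MatGetFactor() and the
   symbolic factorization to adjust MUMPS controls programmatically.
*/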
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0: /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz = mumps->nz; mumps->id.irn = mumps->irn; mumps->id.jcn = mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3: /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc  = mumps->nz;
    mumps->id.irn_loc = mumps->irn; mumps->id.jcn_loc = mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}
/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0: /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz = mumps->nz; mumps->id.irn = mumps->irn; mumps->id.jcn = mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3: /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc  = mumps->nz;
    mumps->id.irn_loc = mumps->irn; mumps->id.jcn_loc = mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS;
  F->ops->matsolve              = MatMatSolve_MUMPS;
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL;
#else
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}
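/*
   Sketch of the Cholesky path (A, F, perm and info are illustrative names):

       ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_CHOLESKY,&F);
       ierr = MatCholeskyFactorSymbolic(F,A,perm,&info);
       ierr = MatCholeskyFactorNumeric(F,A,&info);

   which ends up in the routine above; MUMPS then runs with a symmetric (id.sym > 0) setting,
   so only one triangle of A is passed in (see the *_sbaij converters earlier in this file).
*/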
#undef __FUNCT__
#define __FUNCT__ "MatView_MUMPS"
PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;
  Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;

  PetscFunctionBegin;
  /* check if matrix is mumps type */
  if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",mumps->id.sym);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",mumps->id.par);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(7) (sequential matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(8) (scaling strategy): %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
      if (mumps->id.ICNTL(11)>0) {
        ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(4) (inf norm of input mat): %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(5) (inf norm of solution): %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(6) (inf norm of residual): %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(9) (error estimate): %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
      /* ICNTL(15-17) not used */
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(19) (Schur complement info): %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory that can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);

      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(28) (use parallel or sequential ordering): %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(29) (parallel ordering): %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);

      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(30) (user-specified set of entries in inv(A)): %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(31) (factors discarded in the solve phase): %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," ICNTL(33) (compute determinant): %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);

      ierr = PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);

      /* information local to each processor */
      ierr = PetscViewerASCIIPrintf(viewer, " RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);
      ierr = PetscViewerASCIIPrintf(viewer, " RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);
      ierr = PetscViewerASCIIPrintf(viewer, " RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);

      ierr = PetscViewerASCIIPrintf(viewer, " INFO(15) (estimated size (in MB) of MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);

      ierr = PetscViewerASCIIPrintf(viewer, " INFO(16) (size (in MB) of MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);

      ierr = PetscViewerASCIIPrintf(viewer, " INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);

      if (mumps->ninfo && mumps->ninfo <= 40){
        PetscInt i;
        for (i=0; i<mumps->ninfo; i++){
          ierr = PetscViewerASCIIPrintf(viewer, " INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr);
          ierr = PetscViewerFlush(viewer);
        }
      }

      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);

      if (!mumps->myid) { /* information from the host */
        ierr = PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer," (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);

        ierr = PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
(estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr); 1340 ierr = PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr); 1341 ierr = PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively used after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr); 1342 ierr = PetscViewerASCIIPrintf(viewer," INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr); 1343 ierr = PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr); 1344 ierr = PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space to store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr); 1345 ierr = PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr); 1346 ierr = PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr); 1347 ierr = PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr); 1348 ierr = PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compresses after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr); 1349 ierr = PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr); 1350 ierr = PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr); 1351 ierr = PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr); 1352 ierr = PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr); 1353 ierr = PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr); 1354 ierr = PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr); 1355 ierr = PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr); 1356 ierr = PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr); 1357 ierr = PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr); 1358 ierr = PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr); 1359 ierr = PetscViewerASCIIPrintf(viewer," INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr); 1360 ierr = PetscViewerASCIIPrintf(viewer," INFOG(28) (after factorization: number of null pivots encountered): 
%d\n",mumps->id.INFOG(28));CHKERRQ(ierr); 1361 ierr = PetscViewerASCIIPrintf(viewer," INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr); 1362 ierr = PetscViewerASCIIPrintf(viewer," INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr); 1363 ierr = PetscViewerASCIIPrintf(viewer," INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr); 1364 ierr = PetscViewerASCIIPrintf(viewer," INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr); 1365 ierr = PetscViewerASCIIPrintf(viewer," INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr); 1366 } 1367 } 1368 } 1369 PetscFunctionReturn(0); 1370 } 1371 1372 #undef __FUNCT__ 1373 #define __FUNCT__ "MatGetInfo_MUMPS" 1374 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info) 1375 { 1376 Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr; 1377 1378 PetscFunctionBegin; 1379 info->block_size = 1.0; 1380 info->nz_allocated = mumps->id.INFOG(20); 1381 info->nz_used = mumps->id.INFOG(20); 1382 info->nz_unneeded = 0.0; 1383 info->assemblies = 0.0; 1384 info->mallocs = 0.0; 1385 info->memory = 0.0; 1386 info->fill_ratio_given = 0; 1387 info->fill_ratio_needed = 0; 1388 info->factor_mallocs = 0; 1389 PetscFunctionReturn(0); 1390 } 1391 1392 /* -------------------------------------------------------------------------------------------*/ 1393 #undef __FUNCT__ 1394 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS" 1395 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival) 1396 { 1397 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1398 1399 PetscFunctionBegin; 1400 mumps->id.ICNTL(icntl) = ival; 1401 PetscFunctionReturn(0); 1402 } 1403 1404 #undef __FUNCT__ 1405 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS" 1406 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival) 1407 { 1408 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1409 1410 PetscFunctionBegin; 1411 *ival = mumps->id.ICNTL(icntl); 1412 PetscFunctionReturn(0); 1413 } 1414 1415 #undef __FUNCT__ 1416 #define __FUNCT__ "MatMumpsSetIcntl" 1417 /*@ 1418 MatMumpsSetIcntl - Set MUMPS parameter ICNTL() 1419 1420 Logically Collective on Mat 1421 1422 Input Parameters: 1423 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1424 . icntl - index of MUMPS parameter array ICNTL() 1425 - ival - value of MUMPS ICNTL(icntl) 1426 1427 Options Database: 1428 . -mat_mumps_icntl_<icntl> <ival> 1429 1430 Level: beginner 1431 1432 References: MUMPS Users' Guide 1433 1434 .seealso: MatGetFactor() 1435 @*/ 1436 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival) 1437 { 1438 PetscErrorCode ierr; 1439 1440 PetscFunctionBegin; 1441 PetscValidLogicalCollectiveInt(F,icntl,2); 1442 PetscValidLogicalCollectiveInt(F,ival,3); 1443 ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr); 1444 PetscFunctionReturn(0); 1445 } 1446 1447 #undef __FUNCT__ 1448 #define __FUNCT__ "MatMumpsGetIcntl" 1449 /*@ 1450 MatMumpsGetIcntl - Get MUMPS parameter ICNTL() 1451 1452 Logically Collective on Mat 1453 1454 Input Parameters: 1455 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1456 - icntl - index of MUMPS parameter array ICNTL() 1457 1458 Output Parameter: 1459 . 
ival - value of MUMPS ICNTL(icntl) 1460 1461 Level: beginner 1462 1463 References: MUMPS Users' Guide 1464 1465 .seealso: MatGetFactor() 1466 @*/ 1467 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival) 1468 { 1469 PetscErrorCode ierr; 1470 1471 PetscFunctionBegin; 1472 PetscValidLogicalCollectiveInt(F,icntl,2); 1473 PetscValidIntPointer(ival,3); 1474 ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 1475 PetscFunctionReturn(0); 1476 } 1477 1478 /* -------------------------------------------------------------------------------------------*/ 1479 #undef __FUNCT__ 1480 #define __FUNCT__ "MatMumpsSetCntl_MUMPS" 1481 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val) 1482 { 1483 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1484 1485 PetscFunctionBegin; 1486 mumps->id.CNTL(icntl) = val; 1487 PetscFunctionReturn(0); 1488 } 1489 1490 #undef __FUNCT__ 1491 #define __FUNCT__ "MatMumpsGetCntl_MUMPS" 1492 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val) 1493 { 1494 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1495 1496 PetscFunctionBegin; 1497 *val = mumps->id.CNTL(icntl); 1498 PetscFunctionReturn(0); 1499 } 1500 1501 #undef __FUNCT__ 1502 #define __FUNCT__ "MatMumpsSetCntl" 1503 /*@ 1504 MatMumpsSetCntl - Set MUMPS parameter CNTL() 1505 1506 Logically Collective on Mat 1507 1508 Input Parameters: 1509 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1510 . icntl - index of MUMPS parameter array CNTL() 1511 - val - value of MUMPS CNTL(icntl) 1512 1513 Options Database: 1514 . -mat_mumps_cntl_<icntl> <val> 1515 1516 Level: beginner 1517 1518 References: MUMPS Users' Guide 1519 1520 .seealso: MatGetFactor() 1521 @*/ 1522 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val) 1523 { 1524 PetscErrorCode ierr; 1525 1526 PetscFunctionBegin; 1527 PetscValidLogicalCollectiveInt(F,icntl,2); 1528 PetscValidLogicalCollectiveReal(F,val,3); 1529 ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr); 1530 PetscFunctionReturn(0); 1531 } 1532 1533 #undef __FUNCT__ 1534 #define __FUNCT__ "MatMumpsGetCntl" 1535 /*@ 1536 MatMumpsGetCntl - Get MUMPS parameter CNTL() 1537 1538 Logically Collective on Mat 1539 1540 Input Parameters: 1541 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1542 - icntl - index of MUMPS parameter array CNTL() 1543 1544 Output Parameter: 1545 . 
val - value of MUMPS CNTL(icntl) 1546 1547 Level: beginner 1548 1549 References: MUMPS Users' Guide 1550 1551 .seealso: MatGetFactor() 1552 @*/ 1553 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val) 1554 { 1555 PetscErrorCode ierr; 1556 1557 PetscFunctionBegin; 1558 PetscValidLogicalCollectiveInt(F,icntl,2); 1559 PetscValidRealPointer(val,3); 1560 ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 1561 PetscFunctionReturn(0); 1562 } 1563 1564 #undef __FUNCT__ 1565 #define __FUNCT__ "MatMumpsGetInfo_MUMPS" 1566 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info) 1567 { 1568 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1569 1570 PetscFunctionBegin; 1571 *info = mumps->id.INFO(icntl); 1572 PetscFunctionReturn(0); 1573 } 1574 1575 #undef __FUNCT__ 1576 #define __FUNCT__ "MatMumpsGetInfog_MUMPS" 1577 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog) 1578 { 1579 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1580 1581 PetscFunctionBegin; 1582 *infog = mumps->id.INFOG(icntl); 1583 PetscFunctionReturn(0); 1584 } 1585 1586 #undef __FUNCT__ 1587 #define __FUNCT__ "MatMumpsGetRinfo_MUMPS" 1588 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo) 1589 { 1590 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1591 1592 PetscFunctionBegin; 1593 *rinfo = mumps->id.RINFO(icntl); 1594 PetscFunctionReturn(0); 1595 } 1596 1597 #undef __FUNCT__ 1598 #define __FUNCT__ "MatMumpsGetRinfog_MUMPS" 1599 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog) 1600 { 1601 Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr; 1602 1603 PetscFunctionBegin; 1604 *rinfog = mumps->id.RINFOG(icntl); 1605 PetscFunctionReturn(0); 1606 } 1607 1608 #undef __FUNCT__ 1609 #define __FUNCT__ "MatMumpsGetInfo" 1610 /*@ 1611 MatMumpsGetInfo - Get MUMPS parameter INFO() 1612 1613 Logically Collective on Mat 1614 1615 Input Parameters: 1616 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1617 - icntl - index of MUMPS parameter array INFO() 1618 1619 Output Parameter: 1620 . ival - value of MUMPS INFO(icntl) 1621 1622 Level: beginner 1623 1624 References: MUMPS Users' Guide 1625 1626 .seealso: MatGetFactor() 1627 @*/ 1628 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival) 1629 { 1630 PetscErrorCode ierr; 1631 1632 PetscFunctionBegin; 1633 PetscValidIntPointer(ival,3); 1634 ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 1635 PetscFunctionReturn(0); 1636 } 1637 1638 #undef __FUNCT__ 1639 #define __FUNCT__ "MatMumpsGetInfog" 1640 /*@ 1641 MatMumpsGetInfog - Get MUMPS parameter INFOG() 1642 1643 Logically Collective on Mat 1644 1645 Input Parameters: 1646 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1647 - icntl - index of MUMPS parameter array INFOG() 1648 1649 Output Parameter: 1650 . 
ival - value of MUMPS INFOG(icntl) 1651 1652 Level: beginner 1653 1654 References: MUMPS Users' Guide 1655 1656 .seealso: MatGetFactor() 1657 @*/ 1658 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival) 1659 { 1660 PetscErrorCode ierr; 1661 1662 PetscFunctionBegin; 1663 PetscValidIntPointer(ival,3); 1664 ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 1665 PetscFunctionReturn(0); 1666 } 1667 1668 #undef __FUNCT__ 1669 #define __FUNCT__ "MatMumpsGetRinfo" 1670 /*@ 1671 MatMumpsGetRinfo - Get MUMPS parameter RINFO() 1672 1673 Logically Collective on Mat 1674 1675 Input Parameters: 1676 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1677 - icntl - index of MUMPS parameter array RINFO() 1678 1679 Output Parameter: 1680 . val - value of MUMPS RINFO(icntl) 1681 1682 Level: beginner 1683 1684 References: MUMPS Users' Guide 1685 1686 .seealso: MatGetFactor() 1687 @*/ 1688 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val) 1689 { 1690 PetscErrorCode ierr; 1691 1692 PetscFunctionBegin; 1693 PetscValidRealPointer(val,3); 1694 ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 1695 PetscFunctionReturn(0); 1696 } 1697 1698 #undef __FUNCT__ 1699 #define __FUNCT__ "MatMumpsGetRinfog" 1700 /*@ 1701 MatMumpsGetRinfog - Get MUMPS parameter RINFOG() 1702 1703 Logically Collective on Mat 1704 1705 Input Parameters: 1706 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1707 - icntl - index of MUMPS parameter array RINFOG() 1708 1709 Output Parameter: 1710 . val - value of MUMPS RINFOG(icntl) 1711 1712 Level: beginner 1713 1714 References: MUMPS Users' Guide 1715 1716 .seealso: MatGetFactor() 1717 @*/ 1718 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val) 1719 { 1720 PetscErrorCode ierr; 1721 1722 PetscFunctionBegin; 1723 PetscValidRealPointer(val,3); 1724 ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 1725 PetscFunctionReturn(0); 1726 } 1727 1728 /*MC 1729 MATSOLVERMUMPS - A matrix type providing direct solvers (LU and Cholesky) for 1730 distributed and sequential matrices via the external package MUMPS. 1731 1732 Works with MATAIJ and MATSBAIJ matrices 1733 1734 Options Database Keys: 1735 + -mat_mumps_icntl_1 <6>: ICNTL(1): output stream for error messages (None) 1736 . -mat_mumps_icntl_2 <0>: ICNTL(2): output stream for diagnostic printing, statistics, and warnings (None) 1737 . -mat_mumps_icntl_3 <0>: ICNTL(3): output stream for global information, collected on the host (None) 1738 . -mat_mumps_icntl_4 <0>: ICNTL(4): level of printing (0 to 4) (None) 1739 . -mat_mumps_icntl_6 <7>: ICNTL(6): permutes to a zero-free diagonal and/or scales the matrix (0 to 7) (None) 1740 . -mat_mumps_icntl_7 <7>: ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis (None) 1741 . -mat_mumps_icntl_8 <77>: ICNTL(8): scaling strategy (-2 to 8 or 77) (None) 1742 . -mat_mumps_icntl_10 <0>: ICNTL(10): max num of refinements (None) 1743 . -mat_mumps_icntl_11 <0>: ICNTL(11): statistics related to an error analysis (via -ksp_view) (None) 1744 . -mat_mumps_icntl_12 <1>: ICNTL(12): an ordering strategy for symmetric matrices (0 to 3) (None) 1745 . -mat_mumps_icntl_13 <0>: ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting (None) 1746 . -mat_mumps_icntl_14 <20>: ICNTL(14): percentage increase in the estimated working space (None) 1747 . -mat_mumps_icntl_19 <0>: ICNTL(19): computes the Schur complement (None) 1748 . -mat_mumps_icntl_22 <0>: ICNTL(22): in-core/out-of-core factorization and solve (0 or 1) (None) 1749 . -mat_mumps_icntl_23 <0>: ICNTL(23): max size of the working memory (MB) that can be allocated per processor (None) 1750 . -mat_mumps_icntl_24 <0>: ICNTL(24): detection of null pivot rows (0 or 1) (None) 1751 . -mat_mumps_icntl_25 <0>: ICNTL(25): compute a solution of a deficient matrix and a null space basis (None) 1752 . -mat_mumps_icntl_26 <0>: ICNTL(26): drives the solution phase if a Schur complement matrix has been computed (None) 1753 . -mat_mumps_icntl_28 <1>: ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering (None) 1754 . -mat_mumps_icntl_29 <0>: ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis (None) 1755 . -mat_mumps_icntl_30 <0>: ICNTL(30): compute user-specified set of entries in inv(A) (None) 1756 . -mat_mumps_icntl_31 <0>: ICNTL(31): indicates which factors may be discarded during factorization (None) 1757 . -mat_mumps_icntl_33 <0>: ICNTL(33): compute determinant (None) 1758 . -mat_mumps_cntl_1 <0.01>: CNTL(1): relative pivoting threshold (None) 1759 . -mat_mumps_cntl_2 <1.49012e-08>: CNTL(2): stopping criterion of refinement (None) 1760 . -mat_mumps_cntl_3 <0>: CNTL(3): absolute pivoting threshold (None) 1761 . -mat_mumps_cntl_4 <-1>: CNTL(4): value for static pivoting (None) 1762 - -mat_mumps_cntl_5 <0>: CNTL(5): fixation for null pivots (None) 1763 1764 Notes: a sketch of typical runtime usage is given in the comment that follows this manual page. Level: beginner 1765 1766 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 1767 1768 M*/ 1769
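/*
   Example (a sketch; the executable name ./ex and the process count are placeholders, and the
   option names are those listed in the manual page above): MUMPS is typically selected at
   runtime through the PC options database, e.g.

     mpiexec -n 4 ./ex -pc_type lu -pc_factor_mat_solver_package mumps \
                       -mat_mumps_icntl_7 3 -mat_mumps_icntl_4 1

   which requests an LU factorization carried out by MUMPS, a Scotch ordering during the
   sequential analysis (ICNTL(7)=3), and error messages only (ICNTL(4)=1).  Using
   -pc_type cholesky instead exercises the symmetric (sym > 0) code path set up below.
*/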
1770 #undef __FUNCT__ 1771 #define __FUNCT__ "MatFactorGetSolverPackage_mumps" 1772 static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type) 1773 { 1774 PetscFunctionBegin; 1775 *type = MATSOLVERMUMPS; 1776 PetscFunctionReturn(0); 1777 } 1778 1779 /* MatGetFactor for Seq and MPI AIJ matrices */ 1780 #undef __FUNCT__ 1781 #define __FUNCT__ "MatGetFactor_aij_mumps" 1782 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F) 1783 { 1784 Mat B; 1785 PetscErrorCode ierr; 1786 Mat_MUMPS *mumps; 1787 PetscBool isSeqAIJ; 1788 1789 PetscFunctionBegin; 1790 /* Create the factorization matrix */ 1791 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr); 1792 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 1793 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 1794 ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 1795 if (isSeqAIJ) { 1796 ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr); 1797 } else { 1798 ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr); 1799 } 1800 1801 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 1802 1803 B->ops->view = MatView_MUMPS; 1804 B->ops->getinfo = MatGetInfo_MUMPS; 1805 B->ops->getdiagonal = MatGetDiagonal_MUMPS; 1806 1807 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr); 1808 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 1809 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 1810 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 1811 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 1812 1813 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 1814 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 1815 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 1816 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 1817 if (ftype == MAT_FACTOR_LU) { 1818 B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS; 1819 B->factortype = MAT_FACTOR_LU; 1820 if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij; 1821 else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij; 1822 mumps->sym = 0; 1823 } else { 1824 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 1825 B->factortype = MAT_FACTOR_CHOLESKY; 1826 if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij; 1827 else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij; 1828 if (A->spd_set && A->spd) mumps->sym = 1; 1829 else mumps->sym = 2; 1830 } 1831 1832 mumps->isAIJ = PETSC_TRUE; 1833 mumps->Destroy = B->ops->destroy; 1834 B->ops->destroy = MatDestroy_MUMPS; 1835 B->spptr = (void*)mumps; 1836 1837 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 1838 1839 *F = B; 1840 PetscFunctionReturn(0); 1841 } 1842 1843 /* MatGetFactor for Seq and MPI SBAIJ matrices */ 1844 #undef __FUNCT__ 1845 #define __FUNCT__ "MatGetFactor_sbaij_mumps" 1846 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F) 1847 { 1848 Mat B; 1849 PetscErrorCode ierr; 1850 Mat_MUMPS *mumps; 1851 PetscBool isSeqSBAIJ; 1852 1853 PetscFunctionBegin; 1854 if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix"); 1855 if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead"); 1856 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr); 1857 /* Create the factorization matrix */ 1858 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 1859 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 1860 ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 1861 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 1862 if (isSeqSBAIJ) { 1863 ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr); 1864 1865 mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij; 1866 } else { 1867 ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr); 1868 1869 mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij; 1870 } 1871 1872 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 1873 B->ops->view = MatView_MUMPS; 1874 B->ops->getdiagonal = MatGetDiagonal_MUMPS; 1875 1876 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr); 1877 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 1878 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 1879 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 1880 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 1881 1882 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 1883 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 1884 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 1885 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 1886 1887 B->factortype = MAT_FACTOR_CHOLESKY; 1888 if (A->spd_set && A->spd) mumps->sym = 1; 1889 else mumps->sym = 2; 1890 1891 mumps->isAIJ = PETSC_FALSE; 1892 mumps->Destroy = B->ops->destroy; 1893 B->ops->destroy = MatDestroy_MUMPS; 1894 B->spptr = (void*)mumps; 1895 1896 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 1897 1898 *F = B; 1899 PetscFunctionReturn(0); 1900 } 1901 1902 #undef __FUNCT__ 1903 #define __FUNCT__ "MatGetFactor_baij_mumps" 1904 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F) 1905 { 1906 Mat B; 1907 PetscErrorCode ierr; 1908 Mat_MUMPS *mumps; 1909 PetscBool isSeqBAIJ; 1910 1911 PetscFunctionBegin; 1912 /* Create the factorization matrix */ 1913 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr); 1914 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 1915 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 1916 ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 1917 if (isSeqBAIJ) { 1918 ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr); 1919 } else { 1920 ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr); 1921 } 1922 1923 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 1924 if (ftype == MAT_FACTOR_LU) { 1925 B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS; 1926 B->factortype = MAT_FACTOR_LU; 1927 if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij; 1928 else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij; 1929 mumps->sym = 0; 1930 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n"); 1931 1932 B->ops->view = MatView_MUMPS; 1933 B->ops->getdiagonal = MatGetDiagonal_MUMPS; 1934 1935 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr); 1936 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 1937 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 1938 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 1939 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 1940 1941 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 1942 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 1943 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 1944 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 
1945 1946 mumps->isAIJ = PETSC_TRUE; 1947 mumps->Destroy = B->ops->destroy; 1948 B->ops->destroy = MatDestroy_MUMPS; 1949 B->spptr = (void*)mumps; 1950 1951 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 1952 1953 *F = B; 1954 PetscFunctionReturn(0); 1955 } 1956 1957 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*); 1958 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*); 1959 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat,MatFactorType,Mat*); 1960 1961 #undef __FUNCT__ 1962 #define __FUNCT__ "MatSolverPackageRegister_MUMPS" 1963 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void) 1964 { 1965 PetscErrorCode ierr; 1966 1967 PetscFunctionBegin; 1968 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ, MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 1969 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ, MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 1970 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ, MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 1971 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ, MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 1972 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ, MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 1973 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ, MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 1974 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ, MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 1975 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ, MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 1976 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ, MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 1977 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ, MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 1978 PetscFunctionReturn(0); 1979 } 1980 1981
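
/*
   Usage sketch (illustrative only; it assumes the caller has an assembled AIJ matrix A and
   compatible vectors b and x, and checks errors in the usual PETSc style): obtaining and
   querying a MUMPS LU factorization directly through the routines defined in this file.

     Mat            F;
     MatFactorInfo  info;
     IS             rperm,cperm;
     PetscInt       nullpivots;
     PetscReal      flops;

     ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
     ierr = MatMumpsSetIcntl(F,7,3);CHKERRQ(ierr);               (ICNTL(7)=3: Scotch ordering)
     ierr = MatMumpsSetCntl(F,1,0.01);CHKERRQ(ierr);             (CNTL(1): relative pivoting threshold)
     ierr = MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);CHKERRQ(ierr);
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&info);CHKERRQ(ierr);
     ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
     ierr = MatMumpsGetInfog(F,28,&nullpivots);CHKERRQ(ierr);    (INFOG(28): null pivots encountered)
     ierr = MatMumpsGetRinfog(F,1,&flops);CHKERRQ(ierr);         (RINFOG(1): estimated elimination flops)
     ierr = ISDestroy(&rperm);CHKERRQ(ierr);
     ierr = ISDestroy(&cperm);CHKERRQ(ierr);
     ierr = MatDestroy(&F);CHKERRQ(ierr);

   The orderings passed to MatLUFactorSymbolic() are placeholders here, since the MUMPS
   interface performs its own analysis; in a KSP/PC run the equivalent calls are made
   internally by the LU/Cholesky preconditioners.
*/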