1 2 /* 3 Provides an interface to the MUMPS sparse solver 4 */ 5 6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/ 7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h> 8 9 EXTERN_C_BEGIN 10 #if defined(PETSC_USE_COMPLEX) 11 #if defined(PETSC_USE_REAL_SINGLE) 12 #include <cmumps_c.h> 13 #else 14 #include <zmumps_c.h> 15 #endif 16 #else 17 #if defined(PETSC_USE_REAL_SINGLE) 18 #include <smumps_c.h> 19 #else 20 #include <dmumps_c.h> 21 #endif 22 #endif 23 EXTERN_C_END 24 #define JOB_INIT -1 25 #define JOB_FACTSYMBOLIC 1 26 #define JOB_FACTNUMERIC 2 27 #define JOB_SOLVE 3 28 #define JOB_END -2 29 30 /* calls to MUMPS */ 31 #if defined(PETSC_USE_COMPLEX) 32 #if defined(PETSC_USE_REAL_SINGLE) 33 #define PetscMUMPS_c cmumps_c 34 #else 35 #define PetscMUMPS_c zmumps_c 36 #endif 37 #else 38 #if defined(PETSC_USE_REAL_SINGLE) 39 #define PetscMUMPS_c smumps_c 40 #else 41 #define PetscMUMPS_c dmumps_c 42 #endif 43 #endif 44 45 /* declare MumpsScalar */ 46 #if defined(PETSC_USE_COMPLEX) 47 #if defined(PETSC_USE_REAL_SINGLE) 48 #define MumpsScalar mumps_complex 49 #else 50 #define MumpsScalar mumps_double_complex 51 #endif 52 #else 53 #define MumpsScalar PetscScalar 54 #endif 55 56 /* macros s.t. 
indices match MUMPS documentation */ 57 #define ICNTL(I) icntl[(I)-1] 58 #define CNTL(I) cntl[(I)-1] 59 #define INFOG(I) infog[(I)-1] 60 #define INFO(I) info[(I)-1] 61 #define RINFOG(I) rinfog[(I)-1] 62 #define RINFO(I) rinfo[(I)-1] 63 64 typedef struct { 65 #if defined(PETSC_USE_COMPLEX) 66 #if defined(PETSC_USE_REAL_SINGLE) 67 CMUMPS_STRUC_C id; 68 #else 69 ZMUMPS_STRUC_C id; 70 #endif 71 #else 72 #if defined(PETSC_USE_REAL_SINGLE) 73 SMUMPS_STRUC_C id; 74 #else 75 DMUMPS_STRUC_C id; 76 #endif 77 #endif 78 79 MatStructure matstruc; 80 PetscMPIInt myid,size; 81 PetscInt *irn,*jcn,nz,sym; 82 PetscScalar *val; 83 MPI_Comm comm_mumps; 84 PetscBool isAIJ; 85 PetscInt ICNTL9_pre; /* check if ICNTL(9) is changed from previous MatSolve */ 86 VecScatter scat_rhs, scat_sol; /* used by MatSolve() */ 87 Vec b_seq,x_seq; 88 PetscInt ninfo,*info; /* display INFO */ 89 PetscInt sizeredrhs; 90 PetscScalar *schur_sol; 91 PetscInt schur_sizesol; 92 93 PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); 94 } Mat_MUMPS; 95 96 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*); 97 98 static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps) 99 { 100 PetscErrorCode ierr; 101 102 PetscFunctionBegin; 103 ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr); 104 ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr); 105 ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr); 106 mumps->id.size_schur = 0; 107 mumps->id.schur_lld = 0; 108 mumps->id.ICNTL(19) = 0; 109 PetscFunctionReturn(0); 110 } 111 112 /* solve with rhs in mumps->id.redrhs and return in the same location */ 113 static PetscErrorCode MatMumpsSolveSchur_Private(Mat F) 114 { 115 Mat_MUMPS *mumps=(Mat_MUMPS*)F->data; 116 Mat S,B,X; 117 MatFactorSchurStatus schurstatus; 118 PetscInt sizesol; 119 PetscErrorCode ierr; 120 121 PetscFunctionBegin; 122 ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr); 123 ierr = 
MatFactorGetSchurComplement(F,&S,&schurstatus);CHKERRQ(ierr); 124 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&B);CHKERRQ(ierr); 125 switch (schurstatus) { 126 case MAT_FACTOR_SCHUR_FACTORED: 127 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&X);CHKERRQ(ierr); 128 if (!mumps->id.ICNTL(9)) { /* transpose solve */ 129 ierr = MatMatSolveTranspose(S,B,X);CHKERRQ(ierr); 130 } else { 131 ierr = MatMatSolve(S,B,X);CHKERRQ(ierr); 132 } 133 break; 134 case MAT_FACTOR_SCHUR_INVERTED: 135 sizesol = mumps->id.nrhs*mumps->id.size_schur; 136 if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) { 137 ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr); 138 ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr); 139 mumps->schur_sizesol = sizesol; 140 } 141 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,mumps->schur_sol,&X);CHKERRQ(ierr); 142 if (!mumps->id.ICNTL(9)) { /* transpose solve */ 143 ierr = MatTransposeMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);CHKERRQ(ierr); 144 } else { 145 ierr = MatMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);CHKERRQ(ierr); 146 } 147 ierr = MatCopy(X,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 148 break; 149 default: 150 SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status); 151 break; 152 } 153 ierr = MatFactorRestoreSchurComplement(F,&S,schurstatus);CHKERRQ(ierr); 154 ierr = MatDestroy(&B);CHKERRQ(ierr); 155 ierr = MatDestroy(&X);CHKERRQ(ierr); 156 PetscFunctionReturn(0); 157 } 158 159 static PetscErrorCode MatMumpsHandleSchur_Private(Mat F, PetscBool expansion) 160 { 161 Mat_MUMPS *mumps=(Mat_MUMPS*)F->data; 162 PetscErrorCode ierr; 163 164 PetscFunctionBegin; 165 if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */ 166 PetscFunctionReturn(0); 167 } 168 if (!expansion) { /* prepare for the 
condensation step */ 169 PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur; 170 /* allocate MUMPS internal array to store reduced right-hand sides */ 171 if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) { 172 ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr); 173 mumps->id.lredrhs = mumps->id.size_schur; 174 ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr); 175 mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs; 176 } 177 mumps->id.ICNTL(26) = 1; /* condensation phase */ 178 } else { /* prepare for the expansion step */ 179 /* solve Schur complement (this has to be done by the MUMPS user, so basically us) */ 180 ierr = MatMumpsSolveSchur_Private(F);CHKERRQ(ierr); 181 mumps->id.ICNTL(26) = 2; /* expansion phase */ 182 PetscMUMPS_c(&mumps->id); 183 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 184 /* restore defaults */ 185 mumps->id.ICNTL(26) = -1; 186 /* free MUMPS internal array for redrhs if we have solved for multiple rhs in order to save memory space */ 187 if (mumps->id.nrhs > 1) { 188 ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr); 189 mumps->id.lredrhs = 0; 190 mumps->sizeredrhs = 0; 191 } 192 } 193 PetscFunctionReturn(0); 194 } 195 196 /* 197 MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz] 198 199 input: 200 A - matrix in aij,baij or sbaij (bs=1) format 201 shift - 0: C style output triple; 1: Fortran style output triple. 202 reuse - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple 203 MAT_REUSE_MATRIX: only the values in v array are updated 204 output: 205 nnz - dim of r, c, and v (number of local nonzero entries of A) 206 r, c, v - row and col index, matrix values (matrix triples) 207 208 The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). 
Then in MatDestroy_MUMPS() it is 209 freed with PetscFree((mumps->irn); This is not ideal code, the fact that v is ONLY sometimes part of mumps->irn means 210 that the PetscMalloc() cannot easily be replaced with a PetscMalloc3(). 211 212 */ 213 214 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 215 { 216 const PetscInt *ai,*aj,*ajj,M=A->rmap->n; 217 PetscInt nz,rnz,i,j; 218 PetscErrorCode ierr; 219 PetscInt *row,*col; 220 Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data; 221 222 PetscFunctionBegin; 223 *v=aa->a; 224 if (reuse == MAT_INITIAL_MATRIX) { 225 nz = aa->nz; 226 ai = aa->i; 227 aj = aa->j; 228 *nnz = nz; 229 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 230 col = row + nz; 231 232 nz = 0; 233 for (i=0; i<M; i++) { 234 rnz = ai[i+1] - ai[i]; 235 ajj = aj + ai[i]; 236 for (j=0; j<rnz; j++) { 237 row[nz] = i+shift; col[nz++] = ajj[j] + shift; 238 } 239 } 240 *r = row; *c = col; 241 } 242 PetscFunctionReturn(0); 243 } 244 245 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 246 { 247 Mat_SeqBAIJ *aa=(Mat_SeqBAIJ*)A->data; 248 const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2; 249 PetscInt bs,M,nz,idx=0,rnz,i,j,k,m; 250 PetscErrorCode ierr; 251 PetscInt *row,*col; 252 253 PetscFunctionBegin; 254 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 255 M = A->rmap->N/bs; 256 *v = aa->a; 257 if (reuse == MAT_INITIAL_MATRIX) { 258 ai = aa->i; aj = aa->j; 259 nz = bs2*aa->nz; 260 *nnz = nz; 261 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 262 col = row + nz; 263 264 for (i=0; i<M; i++) { 265 ajj = aj + ai[i]; 266 rnz = ai[i+1] - ai[i]; 267 for (k=0; k<rnz; k++) { 268 for (j=0; j<bs; j++) { 269 for (m=0; m<bs; m++) { 270 row[idx] = i*bs + m + shift; 271 col[idx++] = bs*(ajj[k]) + j + shift; 272 } 273 } 274 } 275 } 276 *r = row; *c = col; 277 } 278 PetscFunctionReturn(0); 279 } 280 281 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat 
A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 282 { 283 const PetscInt *ai, *aj,*ajj,M=A->rmap->n; 284 PetscInt nz,rnz,i,j; 285 PetscErrorCode ierr; 286 PetscInt *row,*col; 287 Mat_SeqSBAIJ *aa=(Mat_SeqSBAIJ*)A->data; 288 289 PetscFunctionBegin; 290 *v = aa->a; 291 if (reuse == MAT_INITIAL_MATRIX) { 292 nz = aa->nz; 293 ai = aa->i; 294 aj = aa->j; 295 *v = aa->a; 296 *nnz = nz; 297 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 298 col = row + nz; 299 300 nz = 0; 301 for (i=0; i<M; i++) { 302 rnz = ai[i+1] - ai[i]; 303 ajj = aj + ai[i]; 304 for (j=0; j<rnz; j++) { 305 row[nz] = i+shift; col[nz++] = ajj[j] + shift; 306 } 307 } 308 *r = row; *c = col; 309 } 310 PetscFunctionReturn(0); 311 } 312 313 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 314 { 315 const PetscInt *ai,*aj,*ajj,*adiag,M=A->rmap->n; 316 PetscInt nz,rnz,i,j; 317 const PetscScalar *av,*v1; 318 PetscScalar *val; 319 PetscErrorCode ierr; 320 PetscInt *row,*col; 321 Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data; 322 PetscBool missing; 323 324 PetscFunctionBegin; 325 ai =aa->i; aj=aa->j;av=aa->a; 326 adiag=aa->diag; 327 ierr = MatMissingDiagonal_SeqAIJ(A,&missing,&i);CHKERRQ(ierr); 328 if (reuse == MAT_INITIAL_MATRIX) { 329 /* count nz in the uppper triangular part of A */ 330 nz = 0; 331 if (missing) { 332 for (i=0; i<M; i++) { 333 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 334 for (j=ai[i];j<ai[i+1];j++) { 335 if (aj[j] < i) continue; 336 nz++; 337 } 338 } else { 339 nz += ai[i+1] - adiag[i]; 340 } 341 } 342 } else { 343 for (i=0; i<M; i++) nz += ai[i+1] - adiag[i]; 344 } 345 *nnz = nz; 346 347 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 348 col = row + nz; 349 val = (PetscScalar*)(col + nz); 350 351 nz = 0; 352 if (missing) { 353 for (i=0; i<M; i++) { 354 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 355 for (j=ai[i];j<ai[i+1];j++) { 356 if (aj[j] < i) continue; 357 
row[nz] = i+shift; 358 col[nz] = aj[j]+shift; 359 val[nz] = av[j]; 360 nz++; 361 } 362 } else { 363 rnz = ai[i+1] - adiag[i]; 364 ajj = aj + adiag[i]; 365 v1 = av + adiag[i]; 366 for (j=0; j<rnz; j++) { 367 row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j]; 368 } 369 } 370 } 371 } else { 372 for (i=0; i<M; i++) { 373 rnz = ai[i+1] - adiag[i]; 374 ajj = aj + adiag[i]; 375 v1 = av + adiag[i]; 376 for (j=0; j<rnz; j++) { 377 row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j]; 378 } 379 } 380 } 381 *r = row; *c = col; *v = val; 382 } else { 383 nz = 0; val = *v; 384 if (missing) { 385 for (i=0; i <M; i++) { 386 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 387 for (j=ai[i];j<ai[i+1];j++) { 388 if (aj[j] < i) continue; 389 val[nz++] = av[j]; 390 } 391 } else { 392 rnz = ai[i+1] - adiag[i]; 393 v1 = av + adiag[i]; 394 for (j=0; j<rnz; j++) { 395 val[nz++] = v1[j]; 396 } 397 } 398 } 399 } else { 400 for (i=0; i <M; i++) { 401 rnz = ai[i+1] - adiag[i]; 402 v1 = av + adiag[i]; 403 for (j=0; j<rnz; j++) { 404 val[nz++] = v1[j]; 405 } 406 } 407 } 408 } 409 PetscFunctionReturn(0); 410 } 411 412 PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 413 { 414 const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 415 PetscErrorCode ierr; 416 PetscInt rstart,nz,i,j,jj,irow,countA,countB; 417 PetscInt *row,*col; 418 const PetscScalar *av, *bv,*v1,*v2; 419 PetscScalar *val; 420 Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)A->data; 421 Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)(mat->A)->data; 422 Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data; 423 424 PetscFunctionBegin; 425 ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart; 426 av=aa->a; bv=bb->a; 427 428 garray = mat->garray; 429 430 if (reuse == MAT_INITIAL_MATRIX) { 431 nz = aa->nz + bb->nz; 432 *nnz = nz; 433 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 434 col = row + nz; 435 val = 
(PetscScalar*)(col + nz); 436 437 *r = row; *c = col; *v = val; 438 } else { 439 row = *r; col = *c; val = *v; 440 } 441 442 jj = 0; irow = rstart; 443 for (i=0; i<m; i++) { 444 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 445 countA = ai[i+1] - ai[i]; 446 countB = bi[i+1] - bi[i]; 447 bjj = bj + bi[i]; 448 v1 = av + ai[i]; 449 v2 = bv + bi[i]; 450 451 /* A-part */ 452 for (j=0; j<countA; j++) { 453 if (reuse == MAT_INITIAL_MATRIX) { 454 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 455 } 456 val[jj++] = v1[j]; 457 } 458 459 /* B-part */ 460 for (j=0; j < countB; j++) { 461 if (reuse == MAT_INITIAL_MATRIX) { 462 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 463 } 464 val[jj++] = v2[j]; 465 } 466 irow++; 467 } 468 PetscFunctionReturn(0); 469 } 470 471 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 472 { 473 const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 474 PetscErrorCode ierr; 475 PetscInt rstart,nz,i,j,jj,irow,countA,countB; 476 PetscInt *row,*col; 477 const PetscScalar *av, *bv,*v1,*v2; 478 PetscScalar *val; 479 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 480 Mat_SeqAIJ *aa = (Mat_SeqAIJ*)(mat->A)->data; 481 Mat_SeqAIJ *bb = (Mat_SeqAIJ*)(mat->B)->data; 482 483 PetscFunctionBegin; 484 ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart; 485 av=aa->a; bv=bb->a; 486 487 garray = mat->garray; 488 489 if (reuse == MAT_INITIAL_MATRIX) { 490 nz = aa->nz + bb->nz; 491 *nnz = nz; 492 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 493 col = row + nz; 494 val = (PetscScalar*)(col + nz); 495 496 *r = row; *c = col; *v = val; 497 } else { 498 row = *r; col = *c; val = *v; 499 } 500 501 jj = 0; irow = rstart; 502 for (i=0; i<m; i++) { 503 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 504 countA = ai[i+1] - ai[i]; 505 countB = bi[i+1] - bi[i]; 506 bjj = bj + bi[i]; 507 v1 = av + ai[i]; 
508 v2 = bv + bi[i]; 509 510 /* A-part */ 511 for (j=0; j<countA; j++) { 512 if (reuse == MAT_INITIAL_MATRIX) { 513 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 514 } 515 val[jj++] = v1[j]; 516 } 517 518 /* B-part */ 519 for (j=0; j < countB; j++) { 520 if (reuse == MAT_INITIAL_MATRIX) { 521 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 522 } 523 val[jj++] = v2[j]; 524 } 525 irow++; 526 } 527 PetscFunctionReturn(0); 528 } 529 530 PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 531 { 532 Mat_MPIBAIJ *mat = (Mat_MPIBAIJ*)A->data; 533 Mat_SeqBAIJ *aa = (Mat_SeqBAIJ*)(mat->A)->data; 534 Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data; 535 const PetscInt *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj; 536 const PetscInt *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart; 537 const PetscInt bs2=mat->bs2; 538 PetscErrorCode ierr; 539 PetscInt bs,nz,i,j,k,n,jj,irow,countA,countB,idx; 540 PetscInt *row,*col; 541 const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2; 542 PetscScalar *val; 543 544 PetscFunctionBegin; 545 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 546 if (reuse == MAT_INITIAL_MATRIX) { 547 nz = bs2*(aa->nz + bb->nz); 548 *nnz = nz; 549 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 550 col = row + nz; 551 val = (PetscScalar*)(col + nz); 552 553 *r = row; *c = col; *v = val; 554 } else { 555 row = *r; col = *c; val = *v; 556 } 557 558 jj = 0; irow = rstart; 559 for (i=0; i<mbs; i++) { 560 countA = ai[i+1] - ai[i]; 561 countB = bi[i+1] - bi[i]; 562 ajj = aj + ai[i]; 563 bjj = bj + bi[i]; 564 v1 = av + bs2*ai[i]; 565 v2 = bv + bs2*bi[i]; 566 567 idx = 0; 568 /* A-part */ 569 for (k=0; k<countA; k++) { 570 for (j=0; j<bs; j++) { 571 for (n=0; n<bs; n++) { 572 if (reuse == MAT_INITIAL_MATRIX) { 573 row[jj] = irow + n + shift; 574 col[jj] = rstart + bs*ajj[k] + j + shift; 575 } 576 val[jj++] = v1[idx++]; 577 
} 578 } 579 } 580 581 idx = 0; 582 /* B-part */ 583 for (k=0; k<countB; k++) { 584 for (j=0; j<bs; j++) { 585 for (n=0; n<bs; n++) { 586 if (reuse == MAT_INITIAL_MATRIX) { 587 row[jj] = irow + n + shift; 588 col[jj] = bs*garray[bjj[k]] + j + shift; 589 } 590 val[jj++] = v2[idx++]; 591 } 592 } 593 } 594 irow += bs; 595 } 596 PetscFunctionReturn(0); 597 } 598 599 PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 600 { 601 const PetscInt *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 602 PetscErrorCode ierr; 603 PetscInt rstart,nz,nza,nzb,i,j,jj,irow,countA,countB; 604 PetscInt *row,*col; 605 const PetscScalar *av, *bv,*v1,*v2; 606 PetscScalar *val; 607 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 608 Mat_SeqAIJ *aa =(Mat_SeqAIJ*)(mat->A)->data; 609 Mat_SeqAIJ *bb =(Mat_SeqAIJ*)(mat->B)->data; 610 611 PetscFunctionBegin; 612 ai=aa->i; aj=aa->j; adiag=aa->diag; 613 bi=bb->i; bj=bb->j; garray = mat->garray; 614 av=aa->a; bv=bb->a; 615 616 rstart = A->rmap->rstart; 617 618 if (reuse == MAT_INITIAL_MATRIX) { 619 nza = 0; /* num of upper triangular entries in mat->A, including diagonals */ 620 nzb = 0; /* num of upper triangular entries in mat->B */ 621 for (i=0; i<m; i++) { 622 nza += (ai[i+1] - adiag[i]); 623 countB = bi[i+1] - bi[i]; 624 bjj = bj + bi[i]; 625 for (j=0; j<countB; j++) { 626 if (garray[bjj[j]] > rstart) nzb++; 627 } 628 } 629 630 nz = nza + nzb; /* total nz of upper triangular part of mat */ 631 *nnz = nz; 632 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 633 col = row + nz; 634 val = (PetscScalar*)(col + nz); 635 636 *r = row; *c = col; *v = val; 637 } else { 638 row = *r; col = *c; val = *v; 639 } 640 641 jj = 0; irow = rstart; 642 for (i=0; i<m; i++) { 643 ajj = aj + adiag[i]; /* ptr to the beginning of the diagonal of this row */ 644 v1 = av + adiag[i]; 645 countA = ai[i+1] - adiag[i]; 646 countB = bi[i+1] - bi[i]; 647 bjj = bj + 
bi[i]; 648 v2 = bv + bi[i]; 649 650 /* A-part */ 651 for (j=0; j<countA; j++) { 652 if (reuse == MAT_INITIAL_MATRIX) { 653 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 654 } 655 val[jj++] = v1[j]; 656 } 657 658 /* B-part */ 659 for (j=0; j < countB; j++) { 660 if (garray[bjj[j]] > rstart) { 661 if (reuse == MAT_INITIAL_MATRIX) { 662 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 663 } 664 val[jj++] = v2[j]; 665 } 666 } 667 irow++; 668 } 669 PetscFunctionReturn(0); 670 } 671 672 PetscErrorCode MatDestroy_MUMPS(Mat A) 673 { 674 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 675 PetscErrorCode ierr; 676 677 PetscFunctionBegin; 678 ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr); 679 ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr); 680 ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr); 681 ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr); 682 ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr); 683 ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr); 684 ierr = PetscFree(mumps->irn);CHKERRQ(ierr); 685 ierr = PetscFree(mumps->info);CHKERRQ(ierr); 686 ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr); 687 mumps->id.job = JOB_END; 688 PetscMUMPS_c(&mumps->id); 689 ierr = MPI_Comm_free(&mumps->comm_mumps);CHKERRQ(ierr); 690 ierr = PetscFree(A->data);CHKERRQ(ierr); 691 692 /* clear composed functions */ 693 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr); 694 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);CHKERRQ(ierr); 695 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorCreateSchurComplement_C",NULL);CHKERRQ(ierr); 696 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr); 697 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr); 698 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr); 699 ierr = 
PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr); 700 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr); 701 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr); 702 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr); 703 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr); 704 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInverse_C",NULL);CHKERRQ(ierr); 705 PetscFunctionReturn(0); 706 } 707 708 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x) 709 { 710 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 711 PetscScalar *array; 712 Vec b_seq; 713 IS is_iden,is_petsc; 714 PetscErrorCode ierr; 715 PetscInt i; 716 PetscBool second_solve = PETSC_FALSE; 717 static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE; 718 719 PetscFunctionBegin; 720 ierr = PetscCitationsRegister("@article{MUMPS01,\n author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n journal = {SIAM Journal on Matrix Analysis and Applications},\n volume = {23},\n number = {1},\n pages = {15--41},\n year = {2001}\n}\n",&cite1);CHKERRQ(ierr); 721 ierr = PetscCitationsRegister("@article{MUMPS02,\n author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. 
Pralet},\n title = {Hybrid scheduling for the parallel solution of linear systems},\n journal = {Parallel Computing},\n volume = {32},\n number = {2},\n pages = {136--156},\n year = {2006}\n}\n",&cite2);CHKERRQ(ierr); 722 723 if (A->factorerrortype) { 724 ierr = PetscInfo2(A,"MatSolve is called with singular matrix factor, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 725 ierr = VecSetInf(x);CHKERRQ(ierr); 726 PetscFunctionReturn(0); 727 } 728 729 mumps->id.nrhs = 1; 730 b_seq = mumps->b_seq; 731 if (mumps->size > 1) { 732 /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */ 733 ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 734 ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 735 if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);} 736 } else { /* size == 1 */ 737 ierr = VecCopy(b,x);CHKERRQ(ierr); 738 ierr = VecGetArray(x,&array);CHKERRQ(ierr); 739 } 740 if (!mumps->myid) { /* define rhs on the host */ 741 mumps->id.nrhs = 1; 742 mumps->id.rhs = (MumpsScalar*)array; 743 } 744 745 /* 746 handle condensation step of Schur complement (if any) 747 We set by default ICNTL(26) == -1 when Schur indices have been provided by the user. 748 According to MUMPS (5.0.0) manual, any value should be harmful during the factorization phase 749 Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system. 
750 This requires an extra call to PetscMUMPS_c and the computation of the factors for S 751 */ 752 if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) { 753 if (mumps->size > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported from PETSc\n"); 754 second_solve = PETSC_TRUE; 755 ierr = MatMumpsHandleSchur_Private(A,PETSC_FALSE);CHKERRQ(ierr); 756 } 757 /* solve phase */ 758 /*-------------*/ 759 mumps->id.job = JOB_SOLVE; 760 PetscMUMPS_c(&mumps->id); 761 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 762 763 /* handle expansion step of Schur complement (if any) */ 764 if (second_solve) { 765 ierr = MatMumpsHandleSchur_Private(A,PETSC_TRUE);CHKERRQ(ierr); 766 } 767 768 if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */ 769 if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) { 770 /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */ 771 ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr); 772 } 773 if (!mumps->scat_sol) { /* create scatter scat_sol */ 774 ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */ 775 for (i=0; i<mumps->id.lsol_loc; i++) { 776 mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */ 777 } 778 ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr); /* to */ 779 ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr); 780 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 781 ierr = ISDestroy(&is_petsc);CHKERRQ(ierr); 782 783 mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */ 784 } 785 786 ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 787 ierr = 
VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 788 } 789 PetscFunctionReturn(0); 790 } 791 792 PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x) 793 { 794 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 795 PetscErrorCode ierr; 796 797 PetscFunctionBegin; 798 mumps->id.ICNTL(9) = 0; 799 ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr); 800 mumps->id.ICNTL(9) = 1; 801 PetscFunctionReturn(0); 802 } 803 804 PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X) 805 { 806 PetscErrorCode ierr; 807 Mat Bt = NULL; 808 PetscBool flg, flgT; 809 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 810 PetscInt i,nrhs,M; 811 PetscScalar *array,*bray; 812 813 PetscFunctionBegin; 814 ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 815 ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&flgT);CHKERRQ(ierr); 816 if (flgT) { 817 if (mumps->size > 1) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix"); 818 ierr = MatTransposeGetMat(B,&Bt);CHKERRQ(ierr); 819 } else { 820 if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix"); 821 } 822 823 ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr); 824 mumps->id.nrhs = nrhs; 825 mumps->id.lrhs = M; 826 827 ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 828 if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix"); 829 830 if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution"); 831 832 if (mumps->size == 1) { 833 PetscScalar *aa; 834 PetscInt spnr,*ia,*ja; 835 PetscBool second_solve = PETSC_FALSE; 836 837 /* copy B to X */ 838 ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr); 839 mumps->id.rhs = (MumpsScalar*)array; 840 if (!Bt) { 841 ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr); 842 ierr = 
PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));CHKERRQ(ierr); 843 ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr); 844 } else { 845 PetscBool done; 846 847 ierr = MatSeqAIJGetArray(Bt,&aa);CHKERRQ(ierr); 848 ierr = MatGetRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&done);CHKERRQ(ierr); 849 if (!done) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Cannot get IJ structure"); 850 mumps->id.irhs_ptr = ia; 851 mumps->id.irhs_sparse = ja; 852 mumps->id.nz_rhs = ia[spnr] - 1; 853 mumps->id.rhs_sparse = (MumpsScalar*)aa; 854 mumps->id.ICNTL(20) = 1; 855 } 856 /* handle condensation step of Schur complement (if any) */ 857 if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) { 858 second_solve = PETSC_TRUE; 859 ierr = MatMumpsHandleSchur_Private(A,PETSC_FALSE);CHKERRQ(ierr); 860 } 861 /* solve phase */ 862 /*-------------*/ 863 mumps->id.job = JOB_SOLVE; 864 PetscMUMPS_c(&mumps->id); 865 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 866 867 /* handle expansion step of Schur complement (if any) */ 868 if (second_solve) { 869 ierr = MatMumpsHandleSchur_Private(A,PETSC_TRUE);CHKERRQ(ierr); 870 } 871 if (Bt) { 872 PetscBool done; 873 874 ierr = MatSeqAIJRestoreArray(Bt,&aa);CHKERRQ(ierr); 875 ierr = MatRestoreRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&done);CHKERRQ(ierr); 876 if (!done) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Cannot restore IJ structure"); 877 mumps->id.ICNTL(20) = 0; 878 } 879 ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr); 880 } else { /*--------- parallel case --------*/ 881 PetscInt lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save; 882 MumpsScalar *sol_loc,*sol_loc_save; 883 IS is_to,is_from; 884 PetscInt k,proc,j,m; 885 const PetscInt *rstart; 886 Vec v_mpi,b_seq,x_seq; 887 VecScatter scat_rhs,scat_sol; 888 889 if 
(mumps->size > 1 && mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported from PETSc\n"); 890 891 /* create x_seq to hold local solution */ 892 isol_loc_save = mumps->id.isol_loc; /* save it for MatSovle() */ 893 sol_loc_save = mumps->id.sol_loc; 894 895 lsol_loc = mumps->id.INFO(23); 896 nlsol_loc = nrhs*lsol_loc; /* length of sol_loc */ 897 ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr); 898 mumps->id.sol_loc = (MumpsScalar*)sol_loc; 899 mumps->id.isol_loc = isol_loc; 900 901 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr); 902 903 /* copy rhs matrix B into vector v_mpi */ 904 ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr); 905 ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr); 906 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr); 907 ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr); 908 909 /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */ 910 /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B; 911 iidx: inverse of idx, will be used by scattering xx_seq -> X */ 912 ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr); 913 ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr); 914 k = 0; 915 for (proc=0; proc<mumps->size; proc++){ 916 for (j=0; j<nrhs; j++){ 917 for (i=rstart[proc]; i<rstart[proc+1]; i++){ 918 iidx[j*M + i] = k; 919 idx[k++] = j*M + i; 920 } 921 } 922 } 923 924 if (!mumps->myid) { 925 ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr); 926 ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr); 927 ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr); 928 } else { 929 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr); 930 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr); 931 ierr = 
ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr); 932 } 933 ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr); 934 ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 935 ierr = ISDestroy(&is_to);CHKERRQ(ierr); 936 ierr = ISDestroy(&is_from);CHKERRQ(ierr); 937 ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 938 939 if (!mumps->myid) { /* define rhs on the host */ 940 ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr); 941 mumps->id.rhs = (MumpsScalar*)bray; 942 ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr); 943 } 944 945 /* solve phase */ 946 /*-------------*/ 947 mumps->id.job = JOB_SOLVE; 948 PetscMUMPS_c(&mumps->id); 949 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 950 951 /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */ 952 ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr); 953 ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr); 954 955 /* create scatter scat_sol */ 956 ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr); 957 ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr); 958 for (i=0; i<lsol_loc; i++) { 959 isol_loc[i] -= 1; /* change Fortran style to C style */ 960 idxx[i] = iidx[isol_loc[i]]; 961 for (j=1; j<nrhs; j++){ 962 idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M]; 963 } 964 } 965 ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr); 966 ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr); 967 ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 968 ierr = ISDestroy(&is_from);CHKERRQ(ierr); 969 ierr = ISDestroy(&is_to);CHKERRQ(ierr); 970 ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 971 ierr = 
MatDenseRestoreArray(X,&array);CHKERRQ(ierr); 972 973 /* free spaces */ 974 mumps->id.sol_loc = sol_loc_save; 975 mumps->id.isol_loc = isol_loc_save; 976 977 ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr); 978 ierr = PetscFree2(idx,iidx);CHKERRQ(ierr); 979 ierr = PetscFree(idxx);CHKERRQ(ierr); 980 ierr = VecDestroy(&x_seq);CHKERRQ(ierr); 981 ierr = VecDestroy(&v_mpi);CHKERRQ(ierr); 982 ierr = VecDestroy(&b_seq);CHKERRQ(ierr); 983 ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr); 984 ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr); 985 } 986 PetscFunctionReturn(0); 987 } 988 989 #if !defined(PETSC_USE_COMPLEX) 990 /* 991 input: 992 F: numeric factor 993 output: 994 nneg: total number of negative pivots 995 nzero: total number of zero pivots 996 npos: (global dimension of F) - nneg - nzero 997 */ 998 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos) 999 { 1000 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1001 PetscErrorCode ierr; 1002 PetscMPIInt size; 1003 1004 PetscFunctionBegin; 1005 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr); 1006 /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */ 1007 if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. 
-mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13)); 1008 1009 if (nneg) *nneg = mumps->id.INFOG(12); 1010 if (nzero || npos) { 1011 if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection"); 1012 if (nzero) *nzero = mumps->id.INFOG(28); 1013 if (npos) *npos = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28)); 1014 } 1015 PetscFunctionReturn(0); 1016 } 1017 #endif 1018 1019 PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info) 1020 { 1021 Mat_MUMPS *mumps =(Mat_MUMPS*)(F)->data; 1022 PetscErrorCode ierr; 1023 PetscBool isMPIAIJ; 1024 1025 PetscFunctionBegin; 1026 if (mumps->id.INFOG(1) < 0) { 1027 if (mumps->id.INFOG(1) == -6) { 1028 ierr = PetscInfo2(A,"MatFactorNumeric is called with singular matrix structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1029 } 1030 ierr = PetscInfo2(A,"MatFactorNumeric is called after analysis phase fails, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1031 PetscFunctionReturn(0); 1032 } 1033 1034 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1035 1036 /* numerical factorization phase */ 1037 /*-------------------------------*/ 1038 mumps->id.job = JOB_FACTNUMERIC; 1039 if (!mumps->id.ICNTL(18)) { /* A is centralized */ 1040 if (!mumps->myid) { 1041 mumps->id.a = (MumpsScalar*)mumps->val; 1042 } 1043 } else { 1044 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1045 } 1046 PetscMUMPS_c(&mumps->id); 1047 if (mumps->id.INFOG(1) < 0) { 1048 if (A->erroriffailure) { 1049 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2)); 1050 } else { 1051 if (mumps->id.INFOG(1) == -10) { /* numerically singular matrix */ 1052 ierr = 
PetscInfo2(F,"matrix is numerically singular, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1053 F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1054 } else if (mumps->id.INFOG(1) == -13) { 1055 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, cannot allocate required memory %d megabytes\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1056 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1057 } else if (mumps->id.INFOG(1) == -8 || mumps->id.INFOG(1) == -9 || (-16 < mumps->id.INFOG(1) && mumps->id.INFOG(1) < -10) ) { 1058 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d, problem with workarray \n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1059 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1060 } else { 1061 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1062 F->factorerrortype = MAT_FACTOR_OTHER; 1063 } 1064 } 1065 } 1066 if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB," mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16)); 1067 1068 F->assembled = PETSC_TRUE; 1069 mumps->matstruc = SAME_NONZERO_PATTERN; 1070 if (F->schur) { /* reset Schur status to unfactored */ 1071 if (mumps->id.ICNTL(19) == 1) { /* stored by rows */ 1072 mumps->id.ICNTL(19) = 2; 1073 ierr = MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur);CHKERRQ(ierr); 1074 } 1075 ierr = MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED);CHKERRQ(ierr); 1076 } 1077 1078 /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */ 1079 if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3; 1080 1081 if (mumps->size > 1) { 1082 PetscInt lsol_loc; 1083 PetscScalar *sol_loc; 1084 1085 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr); 1086 1087 /* 
distributed solution; Create x_seq=sol_loc for repeated use */ 1088 if (mumps->x_seq) { 1089 ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr); 1090 ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr); 1091 ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr); 1092 } 1093 lsol_loc = mumps->id.INFO(23); /* length of sol_loc */ 1094 ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr); 1095 mumps->id.lsol_loc = lsol_loc; 1096 mumps->id.sol_loc = (MumpsScalar*)sol_loc; 1097 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr); 1098 } 1099 PetscFunctionReturn(0); 1100 } 1101 1102 /* Sets MUMPS options from the options database */ 1103 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A) 1104 { 1105 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1106 PetscErrorCode ierr; 1107 PetscInt icntl,info[40],i,ninfo=40; 1108 PetscBool flg; 1109 1110 PetscFunctionBegin; 1111 ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr); 1112 ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr); 1113 if (flg) mumps->id.ICNTL(1) = icntl; 1114 ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr); 1115 if (flg) mumps->id.ICNTL(2) = icntl; 1116 ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr); 1117 if (flg) mumps->id.ICNTL(3) = icntl; 1118 1119 ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr); 1120 if (flg) mumps->id.ICNTL(4) = icntl; 1121 if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default 
id.ICNTL(3) = 6 */ 1122 1123 ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr); 1124 if (flg) mumps->id.ICNTL(6) = icntl; 1125 1126 ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr); 1127 if (flg) { 1128 if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n"); 1129 else mumps->id.ICNTL(7) = icntl; 1130 } 1131 1132 ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr); 1133 /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */ 1134 ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr); 1135 ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr); 1136 ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr); 1137 ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr); 1138 ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr); 1139 ierr = 
PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr); 1140 if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any schur data (if any) */ 1141 ierr = MatDestroy(&F->schur);CHKERRQ(ierr); 1142 ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr); 1143 } 1144 /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */ 1145 /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */ 1146 1147 ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr); 1148 ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr); 1149 ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr); 1150 if (mumps->id.ICNTL(24)) { 1151 mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */ 1152 } 1153 1154 ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computes a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr); 1155 ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr); 1156 ierr = 
PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr); 1157 ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr); 1158 ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr); 1159 /* ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr); */ /* call MatMumpsGetInverse() directly */ 1160 ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr); 1161 /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr); -- not supported by PETSc API */ 1162 ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr); 1163 ierr = PetscOptionsInt("-mat_mumps_icntl_35","ICNTL(35): activates Block Lock Rank (BLR) based factorization","None",mumps->id.ICNTL(35),&mumps->id.ICNTL(35),NULL);CHKERRQ(ierr); 1164 1165 ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr); 1166 ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr); 1167 ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting 
threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr); 1168 ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr); 1169 ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr); 1170 ierr = PetscOptionsReal("-mat_mumps_cntl_7","CNTL(7): dropping parameter used during BLR","None",mumps->id.CNTL(7),&mumps->id.CNTL(7),NULL);CHKERRQ(ierr); 1171 1172 ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);CHKERRQ(ierr); 1173 1174 ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr); 1175 if (ninfo) { 1176 if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must <= 40\n",ninfo); 1177 ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr); 1178 mumps->ninfo = ninfo; 1179 for (i=0; i<ninfo; i++) { 1180 if (info[i] < 0 || info[i]>40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must between 1 and 40\n",ninfo); 1181 else mumps->info[i] = info[i]; 1182 } 1183 } 1184 1185 ierr = PetscOptionsEnd();CHKERRQ(ierr); 1186 PetscFunctionReturn(0); 1187 } 1188 1189 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps) 1190 { 1191 PetscErrorCode ierr; 1192 1193 PetscFunctionBegin; 1194 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);CHKERRQ(ierr); 1195 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr); 1196 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr); 1197 1198 mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps); 1199 1200 mumps->id.job = JOB_INIT; 1201 mumps->id.par = 1; /* host participates factorizaton and solve */ 1202 mumps->id.sym = mumps->sym; 1203 PetscMUMPS_c(&mumps->id); 1204 1205 
mumps->scat_rhs = NULL; 1206 mumps->scat_sol = NULL; 1207 1208 /* set PETSc-MUMPS default options - override MUMPS default */ 1209 mumps->id.ICNTL(3) = 0; 1210 mumps->id.ICNTL(4) = 0; 1211 if (mumps->size == 1) { 1212 mumps->id.ICNTL(18) = 0; /* centralized assembled matrix input */ 1213 } else { 1214 mumps->id.ICNTL(18) = 3; /* distributed assembled matrix input */ 1215 mumps->id.ICNTL(20) = 0; /* rhs is in dense format */ 1216 mumps->id.ICNTL(21) = 1; /* distributed solution */ 1217 } 1218 1219 /* schur */ 1220 mumps->id.size_schur = 0; 1221 mumps->id.listvar_schur = NULL; 1222 mumps->id.schur = NULL; 1223 mumps->sizeredrhs = 0; 1224 mumps->schur_sol = NULL; 1225 mumps->schur_sizesol = 0; 1226 PetscFunctionReturn(0); 1227 } 1228 1229 PetscErrorCode MatFactorSymbolic_MUMPS_ReportIfError(Mat F,Mat A,const MatFactorInfo *info,Mat_MUMPS *mumps) 1230 { 1231 PetscErrorCode ierr; 1232 1233 PetscFunctionBegin; 1234 if (mumps->id.INFOG(1) < 0) { 1235 if (A->erroriffailure) { 1236 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 1237 } else { 1238 if (mumps->id.INFOG(1) == -6) { 1239 ierr = PetscInfo2(F,"matrix is singular in structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1240 F->factorerrortype = MAT_FACTOR_STRUCT_ZEROPIVOT; 1241 } else if (mumps->id.INFOG(1) == -5 || mumps->id.INFOG(1) == -7) { 1242 ierr = PetscInfo2(F,"problem of workspace, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1243 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1244 } else { 1245 ierr = PetscInfo2(F,"Error reported by MUMPS in analysis phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1246 F->factorerrortype = MAT_FACTOR_OTHER; 1247 } 1248 } 1249 } 1250 PetscFunctionReturn(0); 1251 } 1252 1253 /* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c 
are ignored */ 1254 PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 1255 { 1256 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1257 PetscErrorCode ierr; 1258 Vec b; 1259 IS is_iden; 1260 const PetscInt M = A->rmap->N; 1261 1262 PetscFunctionBegin; 1263 mumps->matstruc = DIFFERENT_NONZERO_PATTERN; 1264 1265 /* Set MUMPS options from the options database */ 1266 ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr); 1267 1268 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1269 1270 /* analysis phase */ 1271 /*----------------*/ 1272 mumps->id.job = JOB_FACTSYMBOLIC; 1273 mumps->id.n = M; 1274 switch (mumps->id.ICNTL(18)) { 1275 case 0: /* centralized assembled matrix input */ 1276 if (!mumps->myid) { 1277 mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn; 1278 if (mumps->id.ICNTL(6)>1) { 1279 mumps->id.a = (MumpsScalar*)mumps->val; 1280 } 1281 if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */ 1282 /* 1283 PetscBool flag; 1284 ierr = ISEqual(r,c,&flag);CHKERRQ(ierr); 1285 if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm"); 1286 ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF); 1287 */ 1288 if (!mumps->myid) { 1289 const PetscInt *idx; 1290 PetscInt i,*perm_in; 1291 1292 ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr); 1293 ierr = ISGetIndices(r,&idx);CHKERRQ(ierr); 1294 1295 mumps->id.perm_in = perm_in; 1296 for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */ 1297 ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr); 1298 } 1299 } 1300 } 1301 break; 1302 case 3: /* distributed assembled matrix input (size>1) */ 1303 mumps->id.nz_loc = mumps->nz; 1304 mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn; 1305 if (mumps->id.ICNTL(6)>1) { 1306 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1307 } 1308 /* MUMPS only supports centralized rhs. 
Create scatter scat_rhs for repeated use in MatSolve() */ 1309 if (!mumps->myid) { 1310 ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr); 1311 ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr); 1312 } else { 1313 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr); 1314 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr); 1315 } 1316 ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr); 1317 ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr); 1318 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 1319 ierr = VecDestroy(&b);CHKERRQ(ierr); 1320 break; 1321 } 1322 PetscMUMPS_c(&mumps->id); 1323 ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr); 1324 1325 F->ops->lufactornumeric = MatFactorNumeric_MUMPS; 1326 F->ops->solve = MatSolve_MUMPS; 1327 F->ops->solvetranspose = MatSolveTranspose_MUMPS; 1328 F->ops->matsolve = MatMatSolve_MUMPS; 1329 PetscFunctionReturn(0); 1330 } 1331 1332 /* Note the Petsc r and c permutations are ignored */ 1333 PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 1334 { 1335 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1336 PetscErrorCode ierr; 1337 Vec b; 1338 IS is_iden; 1339 const PetscInt M = A->rmap->N; 1340 1341 PetscFunctionBegin; 1342 mumps->matstruc = DIFFERENT_NONZERO_PATTERN; 1343 1344 /* Set MUMPS options from the options database */ 1345 ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr); 1346 1347 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1348 1349 /* analysis phase */ 1350 /*----------------*/ 1351 mumps->id.job = JOB_FACTSYMBOLIC; 1352 mumps->id.n = M; 1353 switch (mumps->id.ICNTL(18)) { 1354 case 0: /* centralized assembled matrix input */ 1355 if (!mumps->myid) { 1356 mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn; 1357 if (mumps->id.ICNTL(6)>1) { 
1358 mumps->id.a = (MumpsScalar*)mumps->val; 1359 } 1360 } 1361 break; 1362 case 3: /* distributed assembled matrix input (size>1) */ 1363 mumps->id.nz_loc = mumps->nz; 1364 mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn; 1365 if (mumps->id.ICNTL(6)>1) { 1366 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1367 } 1368 /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */ 1369 if (!mumps->myid) { 1370 ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr); 1371 ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr); 1372 } else { 1373 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr); 1374 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr); 1375 } 1376 ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr); 1377 ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr); 1378 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 1379 ierr = VecDestroy(&b);CHKERRQ(ierr); 1380 break; 1381 } 1382 PetscMUMPS_c(&mumps->id); 1383 ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr); 1384 1385 F->ops->lufactornumeric = MatFactorNumeric_MUMPS; 1386 F->ops->solve = MatSolve_MUMPS; 1387 F->ops->solvetranspose = MatSolveTranspose_MUMPS; 1388 PetscFunctionReturn(0); 1389 } 1390 1391 /* Note the Petsc r permutation and factor info are ignored */ 1392 PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info) 1393 { 1394 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1395 PetscErrorCode ierr; 1396 Vec b; 1397 IS is_iden; 1398 const PetscInt M = A->rmap->N; 1399 1400 PetscFunctionBegin; 1401 mumps->matstruc = DIFFERENT_NONZERO_PATTERN; 1402 1403 /* Set MUMPS options from the options database */ 1404 ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr); 1405 1406 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1407 1408 /* 
analysis phase */ 1409 /*----------------*/ 1410 mumps->id.job = JOB_FACTSYMBOLIC; 1411 mumps->id.n = M; 1412 switch (mumps->id.ICNTL(18)) { 1413 case 0: /* centralized assembled matrix input */ 1414 if (!mumps->myid) { 1415 mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn; 1416 if (mumps->id.ICNTL(6)>1) { 1417 mumps->id.a = (MumpsScalar*)mumps->val; 1418 } 1419 } 1420 break; 1421 case 3: /* distributed assembled matrix input (size>1) */ 1422 mumps->id.nz_loc = mumps->nz; 1423 mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn; 1424 if (mumps->id.ICNTL(6)>1) { 1425 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1426 } 1427 /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */ 1428 if (!mumps->myid) { 1429 ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr); 1430 ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr); 1431 } else { 1432 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr); 1433 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr); 1434 } 1435 ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr); 1436 ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr); 1437 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 1438 ierr = VecDestroy(&b);CHKERRQ(ierr); 1439 break; 1440 } 1441 PetscMUMPS_c(&mumps->id); 1442 ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr); 1443 1444 F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS; 1445 F->ops->solve = MatSolve_MUMPS; 1446 F->ops->solvetranspose = MatSolve_MUMPS; 1447 F->ops->matsolve = MatMatSolve_MUMPS; 1448 #if defined(PETSC_USE_COMPLEX) 1449 F->ops->getinertia = NULL; 1450 #else 1451 F->ops->getinertia = MatGetInertia_SBAIJMUMPS; 1452 #endif 1453 PetscFunctionReturn(0); 1454 } 1455 1456 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer) 1457 { 1458 PetscErrorCode ierr; 1459 PetscBool iascii; 1460 PetscViewerFormat 
format; 1461 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 1462 1463 PetscFunctionBegin; 1464 /* check if matrix is mumps type */ 1465 if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0); 1466 1467 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1468 if (iascii) { 1469 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1470 if (format == PETSC_VIEWER_ASCII_INFO) { 1471 ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr); 1472 ierr = PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",mumps->id.sym);CHKERRQ(ierr); 1473 ierr = PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",mumps->id.par);CHKERRQ(ierr); 1474 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr); 1475 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr); 1476 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr); 1477 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr); 1478 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr); 1479 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr); 1480 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(7) (sequential matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr); 1481 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(8) (scaling strategy): %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr); 1482 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr); 1483 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr); 1484 if (mumps->id.ICNTL(11)>0) { 1485 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(4) (inf norm of input 
mat): %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr); 1486 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(5) (inf norm of solution): %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr); 1487 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(6) (inf norm of residual): %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr); 1488 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr); 1489 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(9) (error estimate): %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr); 1490 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr); 1491 } 1492 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr); 1493 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr); 1494 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr); 1495 /* ICNTL(15-17) not used */ 1496 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr); 1497 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(19) (Schur complement info): %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr); 1498 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr); 1499 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr); 1500 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr); 1501 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr); 1502 1503 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d 
\n",mumps->id.ICNTL(24));CHKERRQ(ierr); 1504 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr); 1505 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr); 1506 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr); 1507 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(28) (use parallel or sequential ordering): %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr); 1508 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(29) (parallel ordering): %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr); 1509 1510 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(30) (user-specified set of entries in inv(A)): %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr); 1511 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(31) (factors is discarded in the solve phase): %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr); 1512 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(33) (compute determinant): %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr); 1513 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(35) (activate BLR based factorization): %d \n",mumps->id.ICNTL(35));CHKERRQ(ierr); 1514 1515 ierr = PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",mumps->id.CNTL(1));CHKERRQ(ierr); 1516 ierr = PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr); 1517 ierr = PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",mumps->id.CNTL(3));CHKERRQ(ierr); 1518 ierr = PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",mumps->id.CNTL(4));CHKERRQ(ierr); 1519 ierr = PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g \n",mumps->id.CNTL(5));CHKERRQ(ierr); 1520 ierr = PetscViewerASCIIPrintf(viewer," CNTL(7) (dropping parameter for BLR): %g \n",mumps->id.CNTL(7));CHKERRQ(ierr); 1521 1522 /* infomation local 
to each processor */ 1523 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr); 1524 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 1525 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr); 1526 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1527 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr); 1528 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr); 1529 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1530 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr); 1531 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr); 1532 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1533 1534 ierr = PetscViewerASCIIPrintf(viewer, " INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr); 1535 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr); 1536 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1537 1538 ierr = PetscViewerASCIIPrintf(viewer, " INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr); 1539 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr); 1540 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1541 1542 ierr = PetscViewerASCIIPrintf(viewer, " INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr); 1543 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr); 1544 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1545 1546 if (mumps->ninfo && 
mumps->ninfo <= 40){ 1547 PetscInt i; 1548 for (i=0; i<mumps->ninfo; i++){ 1549 ierr = PetscViewerASCIIPrintf(viewer, " INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr); 1550 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr); 1551 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1552 } 1553 } 1554 1555 1556 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 1557 1558 if (!mumps->myid) { /* information from the host */ 1559 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr); 1560 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr); 1561 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr); 1562 ierr = PetscViewerASCIIPrintf(viewer," (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr); 1563 1564 ierr = PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr); 1565 ierr = PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr); 1566 ierr = PetscViewerASCIIPrintf(viewer," INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr); 1567 ierr = PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr); 1568 ierr = PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr); 1569 ierr = PetscViewerASCIIPrintf(viewer," INFOG(8) 
(structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr); 1570 ierr = PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr); 1571 ierr = PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr); 1572 ierr = PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr); 1573 ierr = PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr); 1574 ierr = PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr); 1575 ierr = PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr); 1576 ierr = PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr); 1577 ierr = PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr); 1578 ierr = PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr); 1579 ierr = PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr); 1580 ierr = PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d 
\n",mumps->id.INFOG(19));CHKERRQ(ierr); 1581 ierr = PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr); 1582 ierr = PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr); 1583 ierr = PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr); 1584 ierr = PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr); 1585 ierr = PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr); 1586 ierr = PetscViewerASCIIPrintf(viewer," INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr); 1587 ierr = PetscViewerASCIIPrintf(viewer," INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr); 1588 ierr = PetscViewerASCIIPrintf(viewer," INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr); 1589 ierr = PetscViewerASCIIPrintf(viewer," INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr); 1590 ierr = PetscViewerASCIIPrintf(viewer," INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr); 1591 ierr = PetscViewerASCIIPrintf(viewer," INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr); 1592 ierr = PetscViewerASCIIPrintf(viewer," INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr); 
      }
    }
  }
  PetscFunctionReturn(0);
}

/* Fill a MatInfo struct for a MUMPS factor. Only the number of entries in the
   factors (global INFOG(20)) is available from MUMPS; the remaining fields are
   zeroed since MUMPS does not report them. */
PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_allocated      = mumps->id.INFOG(20); /* estimated number of entries in the factors */
  info->nz_used           = mumps->id.INFOG(20);
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------------*/
/* Register the index set "is" as the Schur complement block of the factored matrix F:
   allocates the MUMPS Schur workspace (listvar_schur/schur), wraps it in the dense
   matrix F->schur, converts the indices to Fortran 1-based numbering, and selects
   the MUMPS return format through ICNTL(19). */
PetscErrorCode MatFactorSetSchurIS_MUMPS(Mat F, IS is)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
  const PetscInt *idxs;
  PetscInt       size,i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISGetLocalSize(is,&size);CHKERRQ(ierr);
  if (mumps->size > 1) {
    PetscBool ls,gs;

    /* only the host (rank 0) may pass a non-empty index set in parallel */
    ls   = mumps->myid ? (size ? PETSC_FALSE : PETSC_TRUE) : PETSC_TRUE;
    ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,mumps->comm_mumps);CHKERRQ(ierr);
    if (!gs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS distributed parallel Schur complements not yet supported from PETSc\n");
  }
  if (mumps->id.size_schur != size) {
    /* size changed: release the old MUMPS Schur workspace and allocate a new one */
    ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
    mumps->id.size_schur = size;
    mumps->id.schur_lld  = size;
    ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr);
  }

  /* Schur complement matrix: a dense wrapper around the MUMPS-owned buffer */
  /* NOTE(review): a previously created F->schur is not destroyed before being
     overwritten here; repeated calls look like they leak the old wrapper -- confirm
     against MatDestroy_MUMPS / callers */
  ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&F->schur);CHKERRQ(ierr);
  if (mumps->sym == 1) {
    ierr = MatSetOption(F->schur,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
  }

  /* MUMPS expects Fortran style indices */
  ierr = ISGetIndices(is,&idxs);CHKERRQ(ierr);
  ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<size;i++) mumps->id.listvar_schur[i]++;
  ierr = ISRestoreIndices(is,&idxs);CHKERRQ(ierr);
  if (mumps->size > 1) {
    mumps->id.ICNTL(19) = 1; /* MUMPS returns Schur centralized on the host */
  } else {
    if (F->factortype == MAT_FACTOR_LU) {
      mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
    } else {
      mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
    }
  }
  /* set a special value of ICNTL (not handled by MUMPS) to be used in the solve phase by PETSc */
  mumps->id.ICNTL(26) = -1;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------------*/
/* Copy the MUMPS-computed Schur complement into a new SEQDENSE matrix *S, expanding
   the MUMPS storage format (row-major, column-major or triangular, selected by
   ICNTL(19)) into full column-major storage. Caller owns *S. */
PetscErrorCode MatFactorCreateSchurComplement_MUMPS(Mat F,Mat* S)
{
  Mat            St;
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
  PetscScalar    *array;
#if defined(PETSC_USE_COMPLEX)
  PetscScalar    im = PetscSqrtScalar((PetscScalar)-1.0); /* imaginary unit, used to rebuild PetscScalar from MUMPS (r,i) pairs */
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
  ierr = MatCreate(PETSC_COMM_SELF,&St);CHKERRQ(ierr);
  ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr);
  ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr);
  ierr = MatSetUp(St);CHKERRQ(ierr);
  ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full matrix */
    if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        for (j=0;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[j*N+i] = val; /* transpose: row-major -> column-major */
        }
      }
    } else { /* stored by columns */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* either full or lower-triangular (not packed) */
    if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        for (j=i;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          /* mirror into both triangles to return a full symmetric matrix */
          array[i*N+j] = val;
          array[j*N+i] = val;
        }
      }
    } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      PetscInt i,j,N=mumps->id.size_schur;
      for
(i=0;i<N;i++) {
        for (j=0;j<i+1;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          /* mirror into both triangles to return a full symmetric matrix */
          array[i*N+j] = val;
          array[j*N+i] = val;
        }
      }
    }
  }
  ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr);
  *S   = St;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------------*/
/* Store ival into the MUMPS integer control entry ICNTL(icntl); icntl is 1-based,
   matching the MUMPS documentation (see the ICNTL() macro at the top of the file) */
PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  mumps->id.ICNTL(icntl) = ival;
  PetscFunctionReturn(0);
}

/* Read the MUMPS integer control entry ICNTL(icntl) into *ival */
PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  *ival = mumps->id.ICNTL(icntl);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsSetIcntl - Set MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array ICNTL()
-  ival - value of MUMPS ICNTL(icntl)

  Options Database:
.   -mat_mumps_icntl_<icntl> <ival>

   Level: beginner

   References:
.     MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveInt(F,ival,3);
  ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetIcntl - Get MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array ICNTL()

  Output Parameter:
.  ival - value of MUMPS ICNTL(icntl)

   Level: beginner

   References:
.
MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidIntPointer(ival,3);
  ierr = PetscUseMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------------*/
/* Store val into the MUMPS real control entry CNTL(icntl); icntl is 1-based,
   matching the MUMPS documentation */
PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  mumps->id.CNTL(icntl) = val;
  PetscFunctionReturn(0);
}

/* Read the MUMPS real control entry CNTL(icntl) into *val */
PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  *val = mumps->id.CNTL(icntl);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsSetCntl - Set MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array CNTL()
-  val - value of MUMPS CNTL(icntl)

  Options Database:
.   -mat_mumps_cntl_<icntl> <val>

   Level: beginner

   References:
.     MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveReal(F,val,3);
  ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetCntl - Get MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array CNTL()

  Output Parameter:
.  val - value of MUMPS CNTL(icntl)

   Level: beginner

   References:
.
MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidRealPointer(val,3);
  ierr = PetscUseMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Read the local (per-process) MUMPS integer statistic INFO(icntl) into *info */
PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  *info = mumps->id.INFO(icntl);
  PetscFunctionReturn(0);
}

/* Read the global MUMPS integer statistic INFOG(icntl) into *infog */
PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  *infog = mumps->id.INFOG(icntl);
  PetscFunctionReturn(0);
}

/* Read the local (per-process) MUMPS real statistic RINFO(icntl) into *rinfo */
PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  *rinfo = mumps->id.RINFO(icntl);
  PetscFunctionReturn(0);
}

/* Read the global MUMPS real statistic RINFOG(icntl) into *rinfog */
PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;

  PetscFunctionBegin;
  *rinfog = mumps->id.RINFOG(icntl);
  PetscFunctionReturn(0);
}

/* Compute user-selected entries of inv(A) via the MUMPS sparse-rhs feature (ICNTL(30)).
   On the host rank, spRHS must be a MATTRANSPOSEMAT wrapper whose inner SeqAIJ matrix
   holds the requested sparsity pattern; MUMPS overwrites its values with the entries
   of the inverse. Non-host ranks pass NULL rhs data. */
PetscErrorCode MatMumpsGetInverse_MUMPS(Mat F,Mat spRHS)
{
  PetscErrorCode ierr;
  Mat            Bt = NULL;
  PetscBool      flgT;
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
  PetscBool      done;
  PetscScalar    *aa;
  PetscInt       spnr,*ia,*ja;

  PetscFunctionBegin;
  if
(!mumps->myid) {
    PetscValidIntPointer(spRHS,2); /* NOTE(review): spRHS is a Mat, not an int pointer; a header validity check looks intended here -- confirm */
    ierr = PetscObjectTypeCompare((PetscObject)spRHS,MATTRANSPOSEMAT,&flgT);CHKERRQ(ierr);
    if (flgT) {
      ierr = MatTransposeGetMat(spRHS,&Bt);CHKERRQ(ierr);
    } else {
      SETERRQ(PetscObjectComm((PetscObject)spRHS),PETSC_ERR_ARG_WRONG,"Matrix spRHS must be type MATTRANSPOSEMAT matrix");
    }
  }

  /* ICNTL(30) = 1: request user-specified entries of the inverse */
  ierr = MatMumpsSetIcntl(F,30,1);CHKERRQ(ierr);

  if (!mumps->myid) {
    /* hand the requested pattern to MUMPS as a sparse rhs in Fortran 1-based CSR form */
    ierr = MatSeqAIJGetArray(Bt,&aa);CHKERRQ(ierr);
    ierr = MatGetRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&done);CHKERRQ(ierr);
    if (!done) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Cannot get IJ structure");

    mumps->id.irhs_ptr    = ia;
    mumps->id.irhs_sparse = ja;
    mumps->id.nz_rhs      = ia[spnr] - 1; /* ia is 1-based, so this is the number of nonzeros */
    mumps->id.rhs_sparse  = (MumpsScalar*)aa;
  } else {
    mumps->id.irhs_ptr    = NULL;
    mumps->id.irhs_sparse = NULL;
    mumps->id.nz_rhs      = 0;
    mumps->id.rhs_sparse  = NULL;
  }
  mumps->id.ICNTL(20) = 1; /* rhs is sparse */
  mumps->id.ICNTL(21) = 0; /* solution is in assembled centralized format */

  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0)
    SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));

  if (!mumps->myid) {
    ierr = MatSeqAIJRestoreArray(Bt,&aa);CHKERRQ(ierr);
    ierr = MatRestoreRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&done);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetInverse - Get user-specified set of entries in inverse of A

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  spRHS - sequential sparse matrix in MATTRANSPOSEMAT format holding specified indices in processor[0]

  Output Parameter:
. spRHS - requested entries of inverse of A

   Level: beginner

   References:
.      MUMPS Users' Guide

.seealso: MatGetFactor(), MatCreateTranspose()
@*/
PetscErrorCode MatMumpsGetInverse(Mat F,Mat spRHS)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  ierr = PetscUseMethod(F,"MatMumpsGetInverse_C",(Mat,Mat),(F,spRHS));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetInfo - Get MUMPS parameter INFO()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array INFO()

  Output Parameter:
.  ival - value of MUMPS INFO(icntl)

   Level: beginner

   References:
.
MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidIntPointer(ival,3);
  ierr = PetscUseMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetInfog - Get MUMPS parameter INFOG()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array INFOG()

  Output Parameter:
.  ival - value of MUMPS INFOG(icntl)

   Level: beginner

   References:
.     MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidIntPointer(ival,3);
  ierr = PetscUseMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetRinfo - Get MUMPS parameter RINFO()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array RINFO()

  Output Parameter:
.  val - value of MUMPS RINFO(icntl)

   Level: beginner

   References:
.
MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidRealPointer(val,3);
  ierr = PetscUseMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  MatMumpsGetRinfog - Get MUMPS parameter RINFOG()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array RINFOG()

  Output Parameter:
.  val - value of MUMPS RINFOG(icntl)

   Level: beginner

   References:
.     MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
@*/
PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidType(F,1);
  if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
  PetscValidRealPointer(val,3);
  ierr = PetscUseMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
  MATSOLVERMUMPS - A matrix type providing direct solvers (LU and Cholesky) for
  distributed and sequential matrices via the external package MUMPS.

  Works with MATAIJ and MATSBAIJ matrices

  Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch to have PETSc installed with MUMPS

  Use -pc_type cholesky or lu -pc_factor_mat_solver_type mumps to use this direct solver

  Options Database Keys:
+ -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages
. -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warning
. -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host
. -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4)
. -mat_mumps_icntl_6 - ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)
. -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis
. -mat_mumps_icntl_8 - ICNTL(8): scaling strategy (-2 to 8 or 77)
. -mat_mumps_icntl_10 - ICNTL(10): max num of refinements
. -mat_mumps_icntl_11 - ICNTL(11): statistics related to an error analysis (via -ksp_view)
. -mat_mumps_icntl_12 - ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)
. -mat_mumps_icntl_13 - ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting
. -mat_mumps_icntl_14 - ICNTL(14): percentage increase in the estimated working space
. -mat_mumps_icntl_19 - ICNTL(19): computes the Schur complement
. -mat_mumps_icntl_22 - ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)
. -mat_mumps_icntl_23 - ICNTL(23): max size of the working memory (MB) that can be allocated per processor
. -mat_mumps_icntl_24 - ICNTL(24): detection of null pivot rows (0 or 1)
. -mat_mumps_icntl_25 - ICNTL(25): compute a solution of a deficient matrix and a null space basis
. -mat_mumps_icntl_26 - ICNTL(26): drives the solution phase if a Schur complement matrix
. -mat_mumps_icntl_28 - ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering
. -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis
. -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A)
. -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization
. -mat_mumps_icntl_33 - ICNTL(33): compute determinant
. -mat_mumps_cntl_1 - CNTL(1): relative pivoting threshold
. -mat_mumps_cntl_2 - CNTL(2): stopping criterion of refinement
. -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold
. -mat_mumps_cntl_4 - CNTL(4): value for static pivoting
- -mat_mumps_cntl_5 - CNTL(5): fixation for null pivots

  Level: beginner

  Notes: When a MUMPS factorization fails inside a KSP solve, for example with a KSP_DIVERGED_PCSETUP_FAILED, one can find the MUMPS information about the failure by calling
$          KSPGetPC(ksp,&pc);
$          PCFactorGetMatrix(pc,&mat);
$          MatMumpsGetInfo(mat,....);
$          MatMumpsGetInfog(mat,....); etc.
          Or you can run with -ksp_error_if_not_converged and the program will be stopped and the information printed in the error message.

.seealso: PCFactorSetMatSolverType(), MatSolverType, MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog(), KSPGetPC(), PCGetFactor(), PCFactorGetMatrix()

M*/

/* Report the registered solver type name (MATSOLVERMUMPS) for this factor */
static PetscErrorCode MatFactorGetSolverType_mumps(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}

/* MatGetFactor for Seq and MPI AIJ matrices */
/* Create the MUMPS factor matrix B for an AIJ matrix A: sets the factor's
   ops table, composes the MatFactor*/MatMumps* query/set functions, selects
   the triple-conversion routine and the MUMPS symmetry flag (sym) according
   to the factor type, and initializes the MUMPS instance. */
static PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view    = MatView_MUMPS;
  B->ops->getinfo = MatGetInfo_MUMPS;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr);

  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0; /* MUMPS SYM=0: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
#if defined(PETSC_USE_COMPLEX)
    mumps->sym = 2; /* general symmetric (SPD branch not taken for complex scalars) */
#else
    if (A->spd_set && A->spd) mumps->sym = 1; /* SYM=1: symmetric positive definite */
    else mumps->sym = 2;                      /* SYM=2: general symmetric */
#endif
  }

  /* set solvertype */
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr);

  mumps->isAIJ    = PETSC_TRUE;
  B->ops->destroy = MatDestroy_MUMPS;
  B->data         = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}

/* MatGetFactor for Seq and MPI SBAIJ matrices */
static
PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F) 2278 { 2279 Mat B; 2280 PetscErrorCode ierr; 2281 Mat_MUMPS *mumps; 2282 PetscBool isSeqSBAIJ; 2283 2284 PetscFunctionBegin; 2285 if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix"); 2286 if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead"); 2287 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr); 2288 /* Create the factorization matrix */ 2289 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2290 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2291 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2292 ierr = MatSetUp(B);CHKERRQ(ierr); 2293 2294 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2295 if (isSeqSBAIJ) { 2296 mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij; 2297 } else { 2298 mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij; 2299 } 2300 2301 B->ops->getinfo = MatGetInfo_External; 2302 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 2303 B->ops->view = MatView_MUMPS; 2304 2305 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr); 2306 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2307 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2308 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2309 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2310 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2311 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2312 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2313 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2314 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2315 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2316 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr); 2317 2318 B->factortype = MAT_FACTOR_CHOLESKY; 2319 #if defined(PETSC_USE_COMPLEX) 2320 mumps->sym = 2; 2321 #else 2322 if (A->spd_set && A->spd) mumps->sym = 1; 2323 else mumps->sym = 2; 2324 #endif 2325 2326 /* set solvertype */ 2327 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2328 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2329 2330 mumps->isAIJ = PETSC_FALSE; 2331 B->ops->destroy = MatDestroy_MUMPS; 2332 B->data = (void*)mumps; 2333 2334 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2335 2336 *F = B; 2337 PetscFunctionReturn(0); 2338 } 2339 2340 static PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F) 2341 { 2342 Mat B; 2343 PetscErrorCode ierr; 2344 Mat_MUMPS *mumps; 2345 PetscBool isSeqBAIJ; 2346 2347 PetscFunctionBegin; 2348 /* Create the factorization matrix */ 2349 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr); 2350 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2351 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2352 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2353 ierr = MatSetUp(B);CHKERRQ(ierr); 2354 
2355 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2356 if (ftype == MAT_FACTOR_LU) { 2357 B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS; 2358 B->factortype = MAT_FACTOR_LU; 2359 if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij; 2360 else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij; 2361 mumps->sym = 0; 2362 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n"); 2363 2364 B->ops->getinfo = MatGetInfo_External; 2365 B->ops->view = MatView_MUMPS; 2366 2367 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr); 2368 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2369 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2370 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2371 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2372 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2373 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2374 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2375 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2376 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2377 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2378 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr); 2379 2380 /* set solvertype */ 2381 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2382 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2383 2384 mumps->isAIJ = PETSC_TRUE; 2385 B->ops->destroy = MatDestroy_MUMPS; 2386 B->data = (void*)mumps; 2387 2388 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2389 2390 *F = B; 2391 PetscFunctionReturn(0); 2392 } 2393 2394 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void) 2395 { 2396 PetscErrorCode ierr; 2397 2398 PetscFunctionBegin; 2399 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2400 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2401 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2402 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2403 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 2404 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2405 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2406 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2407 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2408 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 2409 PetscFunctionReturn(0); 2410 } 2411 2412