1 2 /* 3 Provides an interface to the MUMPS sparse solver 4 */ 5 6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/ 7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h> 8 #include <../src/mat/impls/sell/mpi/mpisell.h> 9 10 EXTERN_C_BEGIN 11 #if defined(PETSC_USE_COMPLEX) 12 #if defined(PETSC_USE_REAL_SINGLE) 13 #include <cmumps_c.h> 14 #else 15 #include <zmumps_c.h> 16 #endif 17 #else 18 #if defined(PETSC_USE_REAL_SINGLE) 19 #include <smumps_c.h> 20 #else 21 #include <dmumps_c.h> 22 #endif 23 #endif 24 EXTERN_C_END 25 #define JOB_INIT -1 26 #define JOB_FACTSYMBOLIC 1 27 #define JOB_FACTNUMERIC 2 28 #define JOB_SOLVE 3 29 #define JOB_END -2 30 31 /* calls to MUMPS */ 32 #if defined(PETSC_USE_COMPLEX) 33 #if defined(PETSC_USE_REAL_SINGLE) 34 #define MUMPS_c cmumps_c 35 #else 36 #define MUMPS_c zmumps_c 37 #endif 38 #else 39 #if defined(PETSC_USE_REAL_SINGLE) 40 #define MUMPS_c smumps_c 41 #else 42 #define MUMPS_c dmumps_c 43 #endif 44 #endif 45 46 #if defined(PETSC_HAVE_OPENMP) && defined(PETSC_HAVE_PTHREAD) && (defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) || defined(PETSC_HAVE_MMAP)) && defined(PETSC_HAVE_HWLOC) 47 #define PETSC_HAVE_OPENMP_SUPPORT 1 48 #endif 49 50 #if defined(PETSC_HAVE_OPENMP_SUPPORT) 51 #define PetscMUMPS_c(mumps) \ 52 do { \ 53 if (mumps->use_petsc_omp_support) { \ 54 if (mumps->is_omp_master) { \ 55 ierr = PetscOmpCtrlOmpRegionOnMasterBegin(mumps->omp_ctrl);CHKERRQ(ierr); \ 56 MUMPS_c(&mumps->id); \ 57 ierr = PetscOmpCtrlOmpRegionOnMasterEnd(mumps->omp_ctrl);CHKERRQ(ierr); \ 58 } \ 59 ierr = PetscOmpCtrlBarrier(mumps->omp_ctrl);CHKERRQ(ierr); \ 60 } else { \ 61 MUMPS_c(&mumps->id); \ 62 } \ 63 } while(0) 64 #else 65 #define PetscMUMPS_c(mumps) \ 66 do { MUMPS_c(&mumps->id); } while (0) 67 #endif 68 69 /* declare MumpsScalar */ 70 #if defined(PETSC_USE_COMPLEX) 71 #if defined(PETSC_USE_REAL_SINGLE) 72 #define MumpsScalar mumps_complex 73 #else 74 #define MumpsScalar mumps_double_complex 75 #endif 76 #else 77 #define MumpsScalar PetscScalar 78 #endif 79 80 /* macros s.t. 
indices match MUMPS documentation */ 81 #define ICNTL(I) icntl[(I)-1] 82 #define CNTL(I) cntl[(I)-1] 83 #define INFOG(I) infog[(I)-1] 84 #define INFO(I) info[(I)-1] 85 #define RINFOG(I) rinfog[(I)-1] 86 #define RINFO(I) rinfo[(I)-1] 87 88 typedef struct { 89 #if defined(PETSC_USE_COMPLEX) 90 #if defined(PETSC_USE_REAL_SINGLE) 91 CMUMPS_STRUC_C id; 92 #else 93 ZMUMPS_STRUC_C id; 94 #endif 95 #else 96 #if defined(PETSC_USE_REAL_SINGLE) 97 SMUMPS_STRUC_C id; 98 #else 99 DMUMPS_STRUC_C id; 100 #endif 101 #endif 102 103 MatStructure matstruc; 104 PetscMPIInt myid,petsc_size; 105 PetscInt *irn,*jcn,nz,sym; 106 PetscScalar *val; 107 MPI_Comm mumps_comm; 108 PetscInt ICNTL9_pre; /* check if ICNTL(9) is changed from previous MatSolve */ 109 VecScatter scat_rhs, scat_sol; /* used by MatSolve() */ 110 Vec b_seq,x_seq; 111 PetscInt ninfo,*info; /* display INFO */ 112 PetscInt sizeredrhs; 113 PetscScalar *schur_sol; 114 PetscInt schur_sizesol; 115 116 PetscBool use_petsc_omp_support; 117 PetscOmpCtrl omp_ctrl; /* an OpenMP controler that blocked processes will release their CPU (MPI_Barrier does not have this guarantee) */ 118 MPI_Comm petsc_comm,omp_comm; /* petsc_comm is petsc matrix's comm */ 119 PetscMPIInt mpinz; /* on master rank, nz = sum(mpinz) over omp_comm; on other ranks, mpinz = nz*/ 120 PetscMPIInt omp_comm_size; 121 PetscBool is_omp_master; /* is this rank the master of omp_comm */ 122 PetscMPIInt *recvcount,*displs; 123 124 PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); 125 } Mat_MUMPS; 126 127 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*); 128 129 static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps) 130 { 131 PetscErrorCode ierr; 132 133 PetscFunctionBegin; 134 ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr); 135 ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr); 136 ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr); 137 mumps->id.size_schur = 0; 138 mumps->id.schur_lld = 0; 139 mumps->id.ICNTL(19) = 0; 140 PetscFunctionReturn(0); 141 } 142 143 /* solve with rhs in mumps->id.redrhs and return in the same location */ 144 static PetscErrorCode MatMumpsSolveSchur_Private(Mat F) 145 { 146 Mat_MUMPS *mumps=(Mat_MUMPS*)F->data; 147 Mat S,B,X; 148 MatFactorSchurStatus schurstatus; 149 PetscInt sizesol; 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr); 154 ierr = MatFactorGetSchurComplement(F,&S,&schurstatus);CHKERRQ(ierr); 155 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&B);CHKERRQ(ierr); 156 switch (schurstatus) { 157 case MAT_FACTOR_SCHUR_FACTORED: 158 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&X);CHKERRQ(ierr); 159 if (!mumps->id.ICNTL(9)) { /* transpose solve */ 160 ierr = MatMatSolveTranspose(S,B,X);CHKERRQ(ierr); 161 } else { 162 ierr = MatMatSolve(S,B,X);CHKERRQ(ierr); 163 } 164 break; 165 case MAT_FACTOR_SCHUR_INVERTED: 166 sizesol = mumps->id.nrhs*mumps->id.size_schur; 167 if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) { 168 ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr); 169 ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr); 170 mumps->schur_sizesol = sizesol; 171 } 172 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,mumps->schur_sol,&X);CHKERRQ(ierr); 173 if (!mumps->id.ICNTL(9)) { /* transpose solve */ 174 ierr = 
MatTransposeMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);CHKERRQ(ierr); 175 } else { 176 ierr = MatMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);CHKERRQ(ierr); 177 } 178 ierr = MatCopy(X,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 179 break; 180 default: 181 SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status); 182 break; 183 } 184 ierr = MatFactorRestoreSchurComplement(F,&S,schurstatus);CHKERRQ(ierr); 185 ierr = MatDestroy(&B);CHKERRQ(ierr); 186 ierr = MatDestroy(&X);CHKERRQ(ierr); 187 PetscFunctionReturn(0); 188 } 189 190 static PetscErrorCode MatMumpsHandleSchur_Private(Mat F, PetscBool expansion) 191 { 192 Mat_MUMPS *mumps=(Mat_MUMPS*)F->data; 193 PetscErrorCode ierr; 194 195 PetscFunctionBegin; 196 if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */ 197 PetscFunctionReturn(0); 198 } 199 if (!expansion) { /* prepare for the condensation step */ 200 PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur; 201 /* allocate MUMPS internal array to store reduced right-hand sides */ 202 if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) { 203 ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr); 204 mumps->id.lredrhs = mumps->id.size_schur; 205 ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr); 206 mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs; 207 } 208 mumps->id.ICNTL(26) = 1; /* condensation phase */ 209 } else { /* prepare for the expansion step */ 210 /* solve Schur complement (this has to be done by the MUMPS user, so basically us) */ 211 ierr = MatMumpsSolveSchur_Private(F);CHKERRQ(ierr); 212 mumps->id.ICNTL(26) = 2; /* expansion phase */ 213 PetscMUMPS_c(mumps); 214 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 215 /* restore defaults */ 216 mumps->id.ICNTL(26) = -1; 217 /* free MUMPS internal array for redrhs if we have solved for multiple rhs in order to save memory space */ 218 if (mumps->id.nrhs > 1) { 219 ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr); 220 mumps->id.lredrhs = 0; 221 mumps->sizeredrhs = 0; 222 } 223 } 224 PetscFunctionReturn(0); 225 } 226 227 /* 228 MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz] 229 230 input: 231 A - matrix in aij,baij or sbaij (bs=1) format 232 shift - 0: C style output triple; 1: Fortran style output triple. 233 reuse - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple 234 MAT_REUSE_MATRIX: only the values in v array are updated 235 output: 236 nnz - dim of r, c, and v (number of local nonzero entries of A) 237 r, c, v - row and col index, matrix values (matrix triples) 238 239 The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is 240 freed with PetscFree(mumps->irn); This is not ideal code, the fact that v is ONLY sometimes part of mumps->irn means 241 that the PetscMalloc() cannot easily be replaced with a PetscMalloc3(). 
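   For example, with shift=1 (the Fortran-style indexing the MUMPS callers below use), the 2x2
   sequential AIJ matrix
       [ 1.0  2.0 ]
       [ 0.0  3.0 ]
   is returned as nnz = 3, r = {1,1,2}, c = {1,2,2}, v = {1.0,2.0,3.0}, i.e. exactly the 1-based
   COO (irn/jcn/a) layout MUMPS expects. (Small illustrative matrix, not taken from any
   particular test.)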
242 243 */ 244 245 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 246 { 247 const PetscInt *ai,*aj,*ajj,M=A->rmap->n; 248 PetscInt nz,rnz,i,j; 249 PetscErrorCode ierr; 250 PetscInt *row,*col; 251 Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data; 252 253 PetscFunctionBegin; 254 *v=aa->a; 255 if (reuse == MAT_INITIAL_MATRIX) { 256 nz = aa->nz; 257 ai = aa->i; 258 aj = aa->j; 259 *nnz = nz; 260 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 261 col = row + nz; 262 263 nz = 0; 264 for (i=0; i<M; i++) { 265 rnz = ai[i+1] - ai[i]; 266 ajj = aj + ai[i]; 267 for (j=0; j<rnz; j++) { 268 row[nz] = i+shift; col[nz++] = ajj[j] + shift; 269 } 270 } 271 *r = row; *c = col; 272 } 273 PetscFunctionReturn(0); 274 } 275 276 PetscErrorCode MatConvertToTriples_seqsell_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 277 { 278 Mat_SeqSELL *a=(Mat_SeqSELL*)A->data; 279 PetscInt *ptr; 280 281 PetscFunctionBegin; 282 *v = a->val; 283 if (reuse == MAT_INITIAL_MATRIX) { 284 PetscInt nz,i,j,row; 285 PetscErrorCode ierr; 286 287 nz = a->sliidx[a->totalslices]; 288 *nnz = nz; 289 ierr = PetscMalloc1(2*nz, &ptr);CHKERRQ(ierr); 290 *r = ptr; 291 *c = ptr + nz; 292 293 for (i=0; i<a->totalslices; i++) { 294 for (j=a->sliidx[i],row=0; j<a->sliidx[i+1]; j++,row=((row+1)&0x07)) { 295 *ptr++ = 8*i + row + shift; 296 } 297 } 298 for (i=0;i<nz;i++) *ptr++ = a->colidx[i] + shift; 299 } 300 PetscFunctionReturn(0); 301 } 302 303 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 304 { 305 Mat_SeqBAIJ *aa=(Mat_SeqBAIJ*)A->data; 306 const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2; 307 PetscInt bs,M,nz,idx=0,rnz,i,j,k,m; 308 PetscErrorCode ierr; 309 PetscInt *row,*col; 310 311 PetscFunctionBegin; 312 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 313 M = A->rmap->N/bs; 314 *v = aa->a; 315 if (reuse == MAT_INITIAL_MATRIX) { 316 ai = aa->i; aj = aa->j; 317 nz = bs2*aa->nz; 318 *nnz = nz; 319 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 320 col = row + nz; 321 322 for (i=0; i<M; i++) { 323 ajj = aj + ai[i]; 324 rnz = ai[i+1] - ai[i]; 325 for (k=0; k<rnz; k++) { 326 for (j=0; j<bs; j++) { 327 for (m=0; m<bs; m++) { 328 row[idx] = i*bs + m + shift; 329 col[idx++] = bs*(ajj[k]) + j + shift; 330 } 331 } 332 } 333 } 334 *r = row; *c = col; 335 } 336 PetscFunctionReturn(0); 337 } 338 339 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 340 { 341 const PetscInt *ai, *aj,*ajj,M=A->rmap->n; 342 PetscInt nz,rnz,i,j; 343 PetscErrorCode ierr; 344 PetscInt *row,*col; 345 Mat_SeqSBAIJ *aa=(Mat_SeqSBAIJ*)A->data; 346 347 PetscFunctionBegin; 348 *v = aa->a; 349 if (reuse == MAT_INITIAL_MATRIX) { 350 nz = aa->nz; 351 ai = aa->i; 352 aj = aa->j; 353 *v = aa->a; 354 *nnz = nz; 355 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 356 col = row + nz; 357 358 nz = 0; 359 for (i=0; i<M; i++) { 360 rnz = ai[i+1] - ai[i]; 361 ajj = aj + ai[i]; 362 for (j=0; j<rnz; j++) { 363 row[nz] = i+shift; col[nz++] = ajj[j] + shift; 364 } 365 } 366 *r = row; *c = col; 367 } 368 PetscFunctionReturn(0); 369 } 370 371 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 372 { 373 const PetscInt *ai,*aj,*ajj,*adiag,M=A->rmap->n; 374 PetscInt nz,rnz,i,j; 375 const PetscScalar *av,*v1; 376 PetscScalar *val; 377 PetscErrorCode ierr; 378 PetscInt *row,*col; 379 
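  /* This converter keeps only the upper triangular part (diagonal included) of the AIJ matrix,
     since MUMPS takes a single triangle for symmetric (sym > 0) input; rows whose diagonal entry
     is structurally missing are handled by scanning the whole row below */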
Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data; 380 PetscBool missing; 381 382 PetscFunctionBegin; 383 ai = aa->i; aj = aa->j; av = aa->a; 384 adiag = aa->diag; 385 ierr = MatMissingDiagonal_SeqAIJ(A,&missing,&i);CHKERRQ(ierr); 386 if (reuse == MAT_INITIAL_MATRIX) { 387 /* count nz in the upper triangular part of A */ 388 nz = 0; 389 if (missing) { 390 for (i=0; i<M; i++) { 391 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 392 for (j=ai[i];j<ai[i+1];j++) { 393 if (aj[j] < i) continue; 394 nz++; 395 } 396 } else { 397 nz += ai[i+1] - adiag[i]; 398 } 399 } 400 } else { 401 for (i=0; i<M; i++) nz += ai[i+1] - adiag[i]; 402 } 403 *nnz = nz; 404 405 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 406 col = row + nz; 407 val = (PetscScalar*)(col + nz); 408 409 nz = 0; 410 if (missing) { 411 for (i=0; i<M; i++) { 412 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 413 for (j=ai[i];j<ai[i+1];j++) { 414 if (aj[j] < i) continue; 415 row[nz] = i+shift; 416 col[nz] = aj[j]+shift; 417 val[nz] = av[j]; 418 nz++; 419 } 420 } else { 421 rnz = ai[i+1] - adiag[i]; 422 ajj = aj + adiag[i]; 423 v1 = av + adiag[i]; 424 for (j=0; j<rnz; j++) { 425 row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j]; 426 } 427 } 428 } 429 } else { 430 for (i=0; i<M; i++) { 431 rnz = ai[i+1] - adiag[i]; 432 ajj = aj + adiag[i]; 433 v1 = av + adiag[i]; 434 for (j=0; j<rnz; j++) { 435 row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j]; 436 } 437 } 438 } 439 *r = row; *c = col; *v = val; 440 } else { 441 nz = 0; val = *v; 442 if (missing) { 443 for (i=0; i <M; i++) { 444 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 445 for (j=ai[i];j<ai[i+1];j++) { 446 if (aj[j] < i) continue; 447 val[nz++] = av[j]; 448 } 449 } else { 450 rnz = ai[i+1] - adiag[i]; 451 v1 = av + adiag[i]; 452 for (j=0; j<rnz; j++) { 453 val[nz++] = v1[j]; 454 } 455 } 456 } 457 } else { 458 for (i=0; i <M; i++) { 459 rnz = ai[i+1] - adiag[i]; 460 v1 = av + adiag[i]; 461 for (j=0; j<rnz; j++) { 462 val[nz++] = v1[j]; 463 } 464 } 465 } 466 } 467 PetscFunctionReturn(0); 468 } 469 470 PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 471 { 472 const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 473 PetscErrorCode ierr; 474 PetscInt rstart,nz,i,j,jj,irow,countA,countB; 475 PetscInt *row,*col; 476 const PetscScalar *av, *bv,*v1,*v2; 477 PetscScalar *val; 478 Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)A->data; 479 Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)(mat->A)->data; 480 Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data; 481 482 PetscFunctionBegin; 483 ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart; 484 av=aa->a; bv=bb->a; 485 486 garray = mat->garray; 487 488 if (reuse == MAT_INITIAL_MATRIX) { 489 nz = aa->nz + bb->nz; 490 *nnz = nz; 491 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 492 col = row + nz; 493 val = (PetscScalar*)(col + nz); 494 495 *r = row; *c = col; *v = val; 496 } else { 497 row = *r; col = *c; val = *v; 498 } 499 500 jj = 0; irow = rstart; 501 for (i=0; i<m; i++) { 502 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 503 countA = ai[i+1] - ai[i]; 504 countB = bi[i+1] - bi[i]; 505 bjj = bj + bi[i]; 506 v1 = av + ai[i]; 507 v2 = bv + bi[i]; 508 509 /* A-part */ 510 for (j=0; j<countA; j++) { 511 if (reuse == MAT_INITIAL_MATRIX) { 512 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 513 } 514 val[jj++] = v1[j]; 515 } 516 517 /* B-part */ 518 for (j=0; j < countB; j++) { 
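        /* garray[] maps the local column index of the off-diagonal (B) block to its global column
           index, so together with 'shift' the triple is emitted in global Fortran-style coordinates */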
519 if (reuse == MAT_INITIAL_MATRIX) { 520 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 521 } 522 val[jj++] = v2[j]; 523 } 524 irow++; 525 } 526 PetscFunctionReturn(0); 527 } 528 529 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 530 { 531 const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 532 PetscErrorCode ierr; 533 PetscInt rstart,nz,i,j,jj,irow,countA,countB; 534 PetscInt *row,*col; 535 const PetscScalar *av, *bv,*v1,*v2; 536 PetscScalar *val; 537 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 538 Mat_SeqAIJ *aa = (Mat_SeqAIJ*)(mat->A)->data; 539 Mat_SeqAIJ *bb = (Mat_SeqAIJ*)(mat->B)->data; 540 541 PetscFunctionBegin; 542 ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart; 543 av=aa->a; bv=bb->a; 544 545 garray = mat->garray; 546 547 if (reuse == MAT_INITIAL_MATRIX) { 548 nz = aa->nz + bb->nz; 549 *nnz = nz; 550 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 551 col = row + nz; 552 val = (PetscScalar*)(col + nz); 553 554 *r = row; *c = col; *v = val; 555 } else { 556 row = *r; col = *c; val = *v; 557 } 558 559 jj = 0; irow = rstart; 560 for (i=0; i<m; i++) { 561 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 562 countA = ai[i+1] - ai[i]; 563 countB = bi[i+1] - bi[i]; 564 bjj = bj + bi[i]; 565 v1 = av + ai[i]; 566 v2 = bv + bi[i]; 567 568 /* A-part */ 569 for (j=0; j<countA; j++) { 570 if (reuse == MAT_INITIAL_MATRIX) { 571 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 572 } 573 val[jj++] = v1[j]; 574 } 575 576 /* B-part */ 577 for (j=0; j < countB; j++) { 578 if (reuse == MAT_INITIAL_MATRIX) { 579 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 580 } 581 val[jj++] = v2[j]; 582 } 583 irow++; 584 } 585 PetscFunctionReturn(0); 586 } 587 588 PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 589 { 590 Mat_MPIBAIJ *mat = (Mat_MPIBAIJ*)A->data; 591 Mat_SeqBAIJ *aa = (Mat_SeqBAIJ*)(mat->A)->data; 592 Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data; 593 const PetscInt *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj; 594 const PetscInt *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart; 595 const PetscInt bs2=mat->bs2; 596 PetscErrorCode ierr; 597 PetscInt bs,nz,i,j,k,n,jj,irow,countA,countB,idx; 598 PetscInt *row,*col; 599 const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2; 600 PetscScalar *val; 601 602 PetscFunctionBegin; 603 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 604 if (reuse == MAT_INITIAL_MATRIX) { 605 nz = bs2*(aa->nz + bb->nz); 606 *nnz = nz; 607 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 608 col = row + nz; 609 val = (PetscScalar*)(col + nz); 610 611 *r = row; *c = col; *v = val; 612 } else { 613 row = *r; col = *c; val = *v; 614 } 615 616 jj = 0; irow = rstart; 617 for (i=0; i<mbs; i++) { 618 countA = ai[i+1] - ai[i]; 619 countB = bi[i+1] - bi[i]; 620 ajj = aj + ai[i]; 621 bjj = bj + bi[i]; 622 v1 = av + bs2*ai[i]; 623 v2 = bv + bs2*bi[i]; 624 625 idx = 0; 626 /* A-part */ 627 for (k=0; k<countA; k++) { 628 for (j=0; j<bs; j++) { 629 for (n=0; n<bs; n++) { 630 if (reuse == MAT_INITIAL_MATRIX) { 631 row[jj] = irow + n + shift; 632 col[jj] = rstart + bs*ajj[k] + j + shift; 633 } 634 val[jj++] = v1[idx++]; 635 } 636 } 637 } 638 639 idx = 0; 640 /* B-part */ 641 for (k=0; k<countB; k++) { 642 for (j=0; j<bs; j++) { 643 for (n=0; n<bs; n++) { 644 if (reuse == 
MAT_INITIAL_MATRIX) { 645 row[jj] = irow + n + shift; 646 col[jj] = bs*garray[bjj[k]] + j + shift; 647 } 648 val[jj++] = v2[idx++]; 649 } 650 } 651 } 652 irow += bs; 653 } 654 PetscFunctionReturn(0); 655 } 656 657 PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 658 { 659 const PetscInt *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 660 PetscErrorCode ierr; 661 PetscInt rstart,nz,nza,nzb,i,j,jj,irow,countA,countB; 662 PetscInt *row,*col; 663 const PetscScalar *av, *bv,*v1,*v2; 664 PetscScalar *val; 665 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 666 Mat_SeqAIJ *aa =(Mat_SeqAIJ*)(mat->A)->data; 667 Mat_SeqAIJ *bb =(Mat_SeqAIJ*)(mat->B)->data; 668 669 PetscFunctionBegin; 670 ai=aa->i; aj=aa->j; adiag=aa->diag; 671 bi=bb->i; bj=bb->j; garray = mat->garray; 672 av=aa->a; bv=bb->a; 673 674 rstart = A->rmap->rstart; 675 676 if (reuse == MAT_INITIAL_MATRIX) { 677 nza = 0; /* num of upper triangular entries in mat->A, including diagonals */ 678 nzb = 0; /* num of upper triangular entries in mat->B */ 679 for (i=0; i<m; i++) { 680 nza += (ai[i+1] - adiag[i]); 681 countB = bi[i+1] - bi[i]; 682 bjj = bj + bi[i]; 683 for (j=0; j<countB; j++) { 684 if (garray[bjj[j]] > rstart) nzb++; 685 } 686 } 687 688 nz = nza + nzb; /* total nz of upper triangular part of mat */ 689 *nnz = nz; 690 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 691 col = row + nz; 692 val = (PetscScalar*)(col + nz); 693 694 *r = row; *c = col; *v = val; 695 } else { 696 row = *r; col = *c; val = *v; 697 } 698 699 jj = 0; irow = rstart; 700 for (i=0; i<m; i++) { 701 ajj = aj + adiag[i]; /* ptr to the beginning of the diagonal of this row */ 702 v1 = av + adiag[i]; 703 countA = ai[i+1] - adiag[i]; 704 countB = bi[i+1] - bi[i]; 705 bjj = bj + bi[i]; 706 v2 = bv + bi[i]; 707 708 /* A-part */ 709 for (j=0; j<countA; j++) { 710 if (reuse == MAT_INITIAL_MATRIX) { 711 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 712 } 713 val[jj++] = v1[j]; 714 } 715 716 /* B-part */ 717 for (j=0; j < countB; j++) { 718 if (garray[bjj[j]] > rstart) { 719 if (reuse == MAT_INITIAL_MATRIX) { 720 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 721 } 722 val[jj++] = v2[j]; 723 } 724 } 725 irow++; 726 } 727 PetscFunctionReturn(0); 728 } 729 730 PetscErrorCode MatDestroy_MUMPS(Mat A) 731 { 732 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 733 PetscErrorCode ierr; 734 735 PetscFunctionBegin; 736 ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr); 737 ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr); 738 ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr); 739 ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr); 740 ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr); 741 ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr); 742 ierr = PetscFree(mumps->irn);CHKERRQ(ierr); 743 ierr = PetscFree(mumps->info);CHKERRQ(ierr); 744 ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr); 745 mumps->id.job = JOB_END; 746 PetscMUMPS_c(mumps); 747 #if defined(PETSC_HAVE_OPENMP_SUPPORT) 748 if (mumps->use_petsc_omp_support) { ierr = PetscOmpCtrlDestroy(&mumps->omp_ctrl);CHKERRQ(ierr); } 749 #endif 750 ierr = PetscFree2(mumps->recvcount,mumps->displs);CHKERRQ(ierr); 751 ierr = PetscFree(A->data);CHKERRQ(ierr); 752 753 /* clear composed functions */ 754 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr); 755 ierr = 
PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);CHKERRQ(ierr); 756 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorCreateSchurComplement_C",NULL);CHKERRQ(ierr); 757 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr); 758 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr); 759 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr); 760 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr); 761 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr); 762 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr); 763 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr); 764 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr); 765 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInverse_C",NULL);CHKERRQ(ierr); 766 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInverseTranspose_C",NULL);CHKERRQ(ierr); 767 PetscFunctionReturn(0); 768 } 769 770 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x) 771 { 772 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 773 PetscScalar *array; 774 Vec b_seq; 775 IS is_iden,is_petsc; 776 PetscErrorCode ierr; 777 PetscInt i; 778 PetscBool second_solve = PETSC_FALSE; 779 static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE; 780 781 PetscFunctionBegin; 782 ierr = PetscCitationsRegister("@article{MUMPS01,\n author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n journal = {SIAM Journal on Matrix Analysis and Applications},\n volume = {23},\n number = {1},\n pages = {15--41},\n year = {2001}\n}\n",&cite1);CHKERRQ(ierr); 783 ierr = PetscCitationsRegister("@article{MUMPS02,\n author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n title = {Hybrid scheduling for the parallel solution of linear systems},\n journal = {Parallel Computing},\n volume = {32},\n number = {2},\n pages = {136--156},\n year = {2006}\n}\n",&cite2);CHKERRQ(ierr); 784 785 if (A->factorerrortype) { 786 ierr = PetscInfo2(A,"MatSolve is called with singular matrix factor, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 787 ierr = VecSetInf(x);CHKERRQ(ierr); 788 PetscFunctionReturn(0); 789 } 790 791 mumps->id.ICNTL(20)= 0; /* dense RHS */ 792 mumps->id.nrhs = 1; 793 b_seq = mumps->b_seq; 794 if (mumps->petsc_size > 1) { 795 /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */ 796 ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 797 ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 798 if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);} 799 } else { /* petsc_size == 1 */ 800 ierr = VecCopy(b,x);CHKERRQ(ierr); 801 ierr = VecGetArray(x,&array);CHKERRQ(ierr); 802 } 803 if (!mumps->myid) { /* define rhs on the host */ 804 mumps->id.nrhs = 1; 805 mumps->id.rhs = (MumpsScalar*)array; 806 } 807 808 /* 809 handle condensation step of Schur complement (if any) 810 We set by default ICNTL(26) == -1 when Schur indices have been provided by the user. 
   According to the MUMPS (5.0.0) manual, any value should be harmless during the factorization phase.
   Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system.
   This requires an extra call to PetscMUMPS_c and the computation of the factors for S
  */
  if (mumps->id.size_schur > 0 && (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2)) {
    if (mumps->petsc_size > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported from PETSc\n");
    second_solve = PETSC_TRUE;
    ierr = MatMumpsHandleSchur_Private(A,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(mumps);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  /* handle expansion step of Schur complement (if any) */
  if (second_solve) {
    ierr = MatMumpsHandleSchur_Private(A,PETSC_TRUE);CHKERRQ(ierr);
  }

  if (mumps->petsc_size > 1) { /* convert mumps distributed solution to petsc mpi x */
    if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
      /* when id.ICNTL(9) changes, the contents of isol_loc may change (though not its size lsol_loc), so recreate scat_sol */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    }
    if (!mumps->scat_sol) { /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<mumps->id.lsol_loc; i++) {
        mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr); /* to */
      ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);

      mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
    }

    ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  ierr = PetscLogFlops(2.0*mumps->id.RINFO(3));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0;
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1;
  PetscFunctionReturn(0);
}

PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  Mat            Bt = NULL;
  PetscBool      flg, flgT;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->data;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;
  PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
  MumpsScalar    *sol_loc,*sol_loc_save;
  IS             is_to,is_from;
  PetscInt       k,proc,j,m;
  const PetscInt *rstart;
  Vec            v_mpi,b_seq,x_seq;
  VecScatter     scat_rhs,scat_sol;
  PetscScalar    *aa;
  PetscInt       spnr,*ia,*ja;
  Mat_MPIAIJ     *b = NULL;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr =
PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 892 if (flg) { /* dense B */ 893 if (B->rmap->n != X->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution"); 894 mumps->id.ICNTL(20)= 0; /* dense RHS */ 895 } else { /* sparse B */ 896 ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&flgT);CHKERRQ(ierr); 897 if (flgT) { /* input B is transpose of actural RHS matrix, 898 because mumps requires sparse compressed COLUMN storage! See MatMatTransposeSolve_MUMPS() */ 899 ierr = MatTransposeGetMat(B,&Bt);CHKERRQ(ierr); 900 } else SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATTRANSPOSEMAT matrix"); 901 mumps->id.ICNTL(20)= 1; /* sparse RHS */ 902 } 903 904 ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr); 905 mumps->id.nrhs = nrhs; 906 mumps->id.lrhs = M; 907 mumps->id.rhs = NULL; 908 909 if (mumps->petsc_size == 1) { 910 PetscScalar *aa; 911 PetscInt spnr,*ia,*ja; 912 PetscBool second_solve = PETSC_FALSE; 913 914 ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr); 915 mumps->id.rhs = (MumpsScalar*)array; 916 917 if (!Bt) { /* dense B */ 918 /* copy B to X */ 919 ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr); 920 ierr = PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));CHKERRQ(ierr); 921 ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr); 922 } else { /* sparse B */ 923 ierr = MatSeqAIJGetArray(Bt,&aa);CHKERRQ(ierr); 924 ierr = MatGetRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr); 925 if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure"); 926 /* mumps requires ia and ja start at 1! */ 927 mumps->id.irhs_ptr = ia; 928 mumps->id.irhs_sparse = ja; 929 mumps->id.nz_rhs = ia[spnr] - 1; 930 mumps->id.rhs_sparse = (MumpsScalar*)aa; 931 } 932 /* handle condensation step of Schur complement (if any) */ 933 if (mumps->id.size_schur > 0 && (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2)) { 934 second_solve = PETSC_TRUE; 935 ierr = MatMumpsHandleSchur_Private(A,PETSC_FALSE);CHKERRQ(ierr); 936 } 937 /* solve phase */ 938 /*-------------*/ 939 mumps->id.job = JOB_SOLVE; 940 PetscMUMPS_c(mumps); 941 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 942 943 /* handle expansion step of Schur complement (if any) */ 944 if (second_solve) { 945 ierr = MatMumpsHandleSchur_Private(A,PETSC_TRUE);CHKERRQ(ierr); 946 } 947 if (Bt) { /* sparse B */ 948 ierr = MatSeqAIJRestoreArray(Bt,&aa);CHKERRQ(ierr); 949 ierr = MatRestoreRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr); 950 if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot restore IJ structure"); 951 } 952 ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr); 953 PetscFunctionReturn(0); 954 } 955 956 /*--------- parallel case: MUMPS requires rhs B to be centralized on the host! 
--------*/ 957 if (mumps->petsc_size > 1 && mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported from PETSc\n"); 958 959 /* create x_seq to hold mumps local solution */ 960 isol_loc_save = mumps->id.isol_loc; /* save it for MatSovle() */ 961 sol_loc_save = mumps->id.sol_loc; 962 963 lsol_loc = mumps->id.lsol_loc; 964 nlsol_loc = nrhs*lsol_loc; /* length of sol_loc */ 965 ierr = PetscMalloc2(nlsol_loc,&sol_loc,lsol_loc,&isol_loc);CHKERRQ(ierr); 966 mumps->id.sol_loc = (MumpsScalar*)sol_loc; 967 mumps->id.isol_loc = isol_loc; 968 969 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr); 970 971 /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */ 972 /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B; 973 iidx: inverse of idx, will be used by scattering mumps x_seq -> petsc X */ 974 ierr = PetscMalloc1(nrhs*M,&idx);CHKERRQ(ierr); 975 976 if (!Bt) { /* dense B */ 977 /* wrap dense rhs matrix B into a vector v_mpi */ 978 ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr); 979 ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr); 980 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr); 981 ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr); 982 983 /* scatter v_mpi to b_seq in proc[0]. MUMPS requires rhs to be centralized on the host! */ 984 if (!mumps->myid) { 985 ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr); 986 k = 0; 987 for (proc=0; proc<mumps->petsc_size; proc++){ 988 for (j=0; j<nrhs; j++){ 989 for (i=rstart[proc]; i<rstart[proc+1]; i++){ 990 idx[k++] = j*M + i; 991 } 992 } 993 } 994 995 ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr); 996 ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr); 997 ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr); 998 } else { 999 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr); 1000 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr); 1001 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr); 1002 } 1003 ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr); 1004 ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1005 ierr = ISDestroy(&is_to);CHKERRQ(ierr); 1006 ierr = ISDestroy(&is_from);CHKERRQ(ierr); 1007 ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1008 1009 if (!mumps->myid) { /* define rhs on the host */ 1010 ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr); 1011 mumps->id.rhs = (MumpsScalar*)bray; 1012 ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr); 1013 } 1014 1015 } else { /* sparse B */ 1016 b = (Mat_MPIAIJ*)Bt->data; 1017 1018 /* wrap dense X into a vector v_mpi */ 1019 ierr = MatGetLocalSize(X,&m,NULL);CHKERRQ(ierr); 1020 ierr = MatDenseGetArray(X,&bray);CHKERRQ(ierr); 1021 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)X),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr); 1022 ierr = MatDenseRestoreArray(X,&bray);CHKERRQ(ierr); 1023 1024 if (!mumps->myid) { 1025 ierr = MatSeqAIJGetArray(b->A,&aa);CHKERRQ(ierr); 1026 ierr = MatGetRowIJ(b->A,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr); 1027 if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure"); 1028 /* mumps requires ia and ja start at 1! 
*/ 1029 mumps->id.irhs_ptr = ia; 1030 mumps->id.irhs_sparse = ja; 1031 mumps->id.nz_rhs = ia[spnr] - 1; 1032 mumps->id.rhs_sparse = (MumpsScalar*)aa; 1033 } else { 1034 mumps->id.irhs_ptr = NULL; 1035 mumps->id.irhs_sparse = NULL; 1036 mumps->id.nz_rhs = 0; 1037 mumps->id.rhs_sparse = NULL; 1038 } 1039 } 1040 1041 /* solve phase */ 1042 /*-------------*/ 1043 mumps->id.job = JOB_SOLVE; 1044 PetscMUMPS_c(mumps); 1045 if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 1046 1047 /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */ 1048 ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr); 1049 ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr); 1050 1051 /* create scatter scat_sol */ 1052 ierr = MatGetOwnershipRanges(X,&rstart);CHKERRQ(ierr); 1053 /* iidx: inverse of idx computed above, used for scattering mumps x_seq to petsc X */ 1054 iidx = idx; 1055 k = 0; 1056 for (proc=0; proc<mumps->petsc_size; proc++){ 1057 for (j=0; j<nrhs; j++){ 1058 for (i=rstart[proc]; i<rstart[proc+1]; i++) iidx[j*M + i] = k++; 1059 } 1060 } 1061 1062 ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr); 1063 ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr); 1064 for (i=0; i<lsol_loc; i++) { 1065 isol_loc[i] -= 1; /* change Fortran style to C style */ 1066 idxx[i] = iidx[isol_loc[i]]; 1067 for (j=1; j<nrhs; j++){ 1068 idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M]; 1069 } 1070 } 1071 ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr); 1072 ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr); 1073 ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1074 ierr = ISDestroy(&is_from);CHKERRQ(ierr); 1075 ierr = ISDestroy(&is_to);CHKERRQ(ierr); 1076 ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1077 ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr); 1078 1079 /* free spaces */ 1080 mumps->id.sol_loc = sol_loc_save; 1081 mumps->id.isol_loc = isol_loc_save; 1082 1083 ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr); 1084 ierr = PetscFree(idx);CHKERRQ(ierr); 1085 ierr = PetscFree(idxx);CHKERRQ(ierr); 1086 ierr = VecDestroy(&x_seq);CHKERRQ(ierr); 1087 ierr = VecDestroy(&v_mpi);CHKERRQ(ierr); 1088 if (Bt) { 1089 if (!mumps->myid) { 1090 b = (Mat_MPIAIJ*)Bt->data; 1091 ierr = MatSeqAIJRestoreArray(b->A,&aa);CHKERRQ(ierr); 1092 ierr = MatRestoreRowIJ(b->A,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr); 1093 if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot restore IJ structure"); 1094 } 1095 } else { 1096 ierr = VecDestroy(&b_seq);CHKERRQ(ierr); 1097 ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr); 1098 } 1099 ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr); 1100 ierr = PetscLogFlops(2.0*nrhs*mumps->id.RINFO(3));CHKERRQ(ierr); 1101 PetscFunctionReturn(0); 1102 } 1103 1104 PetscErrorCode MatMatTransposeSolve_MUMPS(Mat A,Mat Bt,Mat X) 1105 { 1106 PetscErrorCode ierr; 1107 PetscBool flg; 1108 Mat B; 1109 1110 PetscFunctionBegin; 1111 ierr = PetscObjectTypeCompareAny((PetscObject)Bt,&flg,MATSEQAIJ,MATMPIAIJ,NULL);CHKERRQ(ierr); 1112 if (!flg) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Matrix Bt must be MATAIJ matrix"); 1113 1114 /* Create B=Bt^T that uses Bt's data structure */ 1115 ierr = MatCreateTranspose(Bt,&B);CHKERRQ(ierr); 1116 1117 
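  /* MatMatSolve_MUMPS() recognizes the MATTRANSPOSEMAT wrapper created above: it retrieves Bt with
     MatTransposeGetMat() and hands Bt's AIJ arrays to MUMPS as a sparse compressed-column right-hand
     side (ICNTL(20)=1), so the transpose is never formed explicitly */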
ierr = MatMatSolve_MUMPS(A,B,X);CHKERRQ(ierr); 1118 ierr = MatDestroy(&B);CHKERRQ(ierr); 1119 PetscFunctionReturn(0); 1120 } 1121 1122 #if !defined(PETSC_USE_COMPLEX) 1123 /* 1124 input: 1125 F: numeric factor 1126 output: 1127 nneg: total number of negative pivots 1128 nzero: total number of zero pivots 1129 npos: (global dimension of F) - nneg - nzero 1130 */ 1131 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos) 1132 { 1133 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1134 PetscErrorCode ierr; 1135 PetscMPIInt size; 1136 1137 PetscFunctionBegin; 1138 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr); 1139 /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */ 1140 if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13)); 1141 1142 if (nneg) *nneg = mumps->id.INFOG(12); 1143 if (nzero || npos) { 1144 if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection"); 1145 if (nzero) *nzero = mumps->id.INFOG(28); 1146 if (npos) *npos = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28)); 1147 } 1148 PetscFunctionReturn(0); 1149 } 1150 #endif 1151 1152 PetscErrorCode MatMumpsGatherNonzerosOnMaster(MatReuse reuse,Mat_MUMPS *mumps) 1153 { 1154 PetscErrorCode ierr; 1155 PetscInt i,nz=0,*irn,*jcn=0; 1156 PetscScalar *val=0; 1157 PetscMPIInt mpinz,*recvcount=NULL,*displs=NULL; 1158 1159 PetscFunctionBegin; 1160 if (mumps->omp_comm_size > 1) { 1161 if (reuse == MAT_INITIAL_MATRIX) { 1162 /* master first gathers counts of nonzeros to receive */ 1163 if (mumps->is_omp_master) { ierr = PetscMalloc2(mumps->omp_comm_size,&recvcount,mumps->omp_comm_size,&displs);CHKERRQ(ierr); } 1164 ierr = PetscMPIIntCast(mumps->nz,&mpinz);CHKERRQ(ierr); 1165 ierr = MPI_Gather(&mpinz,1,MPI_INT,recvcount,1,MPI_INT,0/*root*/,mumps->omp_comm);CHKERRQ(ierr); 1166 1167 /* master allocates memory to receive nonzeros */ 1168 if (mumps->is_omp_master) { 1169 displs[0] = 0; 1170 for (i=1; i<mumps->omp_comm_size; i++) displs[i] = displs[i-1] + recvcount[i-1]; 1171 nz = displs[mumps->omp_comm_size-1] + recvcount[mumps->omp_comm_size-1]; 1172 ierr = PetscMalloc(2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar),&irn);CHKERRQ(ierr); 1173 jcn = irn + nz; 1174 val = (PetscScalar*)(jcn + nz); 1175 } 1176 1177 /* save the gatherv plan */ 1178 mumps->mpinz = mpinz; /* used as send count */ 1179 mumps->recvcount = recvcount; 1180 mumps->displs = displs; 1181 1182 /* master gathers nonzeros */ 1183 ierr = MPI_Gatherv(mumps->irn,mpinz,MPIU_INT,irn,mumps->recvcount,mumps->displs,MPIU_INT,0/*root*/,mumps->omp_comm);CHKERRQ(ierr); 1184 ierr = MPI_Gatherv(mumps->jcn,mpinz,MPIU_INT,jcn,mumps->recvcount,mumps->displs,MPIU_INT,0/*root*/,mumps->omp_comm);CHKERRQ(ierr); 1185 ierr = MPI_Gatherv(mumps->val,mpinz,MPIU_SCALAR,val,mumps->recvcount,mumps->displs,MPIU_SCALAR,0/*root*/,mumps->omp_comm);CHKERRQ(ierr); 1186 1187 /* master frees its row/col/val and replaces them with bigger arrays */ 1188 if (mumps->is_omp_master) { 1189 ierr = PetscFree(mumps->irn);CHKERRQ(ierr); /* irn/jcn/val are allocated together so free only irn */ 1190 mumps->nz = nz; /* it is a sum of mpinz over omp_comm */ 1191 mumps->irn = irn; 1192 mumps->jcn = jcn; 1193 mumps->val = val; 1194 } 1195 } 
else { 1196 ierr = MPI_Gatherv((mumps->is_omp_master?MPI_IN_PLACE:mumps->val),mumps->mpinz,MPIU_SCALAR,mumps->val,mumps->recvcount,mumps->displs,MPIU_SCALAR,0/*root*/,mumps->omp_comm);CHKERRQ(ierr); 1197 } 1198 } 1199 PetscFunctionReturn(0); 1200 } 1201 1202 PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info) 1203 { 1204 Mat_MUMPS *mumps =(Mat_MUMPS*)(F)->data; 1205 PetscErrorCode ierr; 1206 PetscBool isMPIAIJ; 1207 1208 PetscFunctionBegin; 1209 if (mumps->id.INFOG(1) < 0) { 1210 if (mumps->id.INFOG(1) == -6) { 1211 ierr = PetscInfo2(A,"MatFactorNumeric is called with singular matrix structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1212 } 1213 ierr = PetscInfo2(A,"MatFactorNumeric is called after analysis phase fails, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1214 PetscFunctionReturn(0); 1215 } 1216 1217 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1218 ierr = MatMumpsGatherNonzerosOnMaster(MAT_REUSE_MATRIX,mumps);CHKERRQ(ierr); 1219 1220 /* numerical factorization phase */ 1221 /*-------------------------------*/ 1222 mumps->id.job = JOB_FACTNUMERIC; 1223 if (!mumps->id.ICNTL(18)) { /* A is centralized */ 1224 if (!mumps->myid) { 1225 mumps->id.a = (MumpsScalar*)mumps->val; 1226 } 1227 } else { 1228 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1229 } 1230 PetscMUMPS_c(mumps); 1231 if (mumps->id.INFOG(1) < 0) { 1232 if (A->erroriffailure) { 1233 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2)); 1234 } else { 1235 if (mumps->id.INFOG(1) == -10) { /* numerically singular matrix */ 1236 ierr = PetscInfo2(F,"matrix is numerically singular, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1237 F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1238 } else if (mumps->id.INFOG(1) == -13) { 1239 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, cannot allocate required memory %d megabytes\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1240 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1241 } else if (mumps->id.INFOG(1) == -8 || mumps->id.INFOG(1) == -9 || (-16 < mumps->id.INFOG(1) && mumps->id.INFOG(1) < -10) ) { 1242 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d, problem with workarray \n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1243 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1244 } else { 1245 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1246 F->factorerrortype = MAT_FACTOR_OTHER; 1247 } 1248 } 1249 } 1250 if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB," mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16)); 1251 1252 F->assembled = PETSC_TRUE; 1253 mumps->matstruc = SAME_NONZERO_PATTERN; 1254 if (F->schur) { /* reset Schur status to unfactored */ 1255 if (mumps->id.ICNTL(19) == 1) { /* stored by rows */ 1256 mumps->id.ICNTL(19) = 2; 1257 ierr = MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur);CHKERRQ(ierr); 1258 } 1259 ierr = MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED);CHKERRQ(ierr); 1260 } 1261 1262 /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */ 1263 if (!mumps->sym && 
mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3; 1264 1265 if (!mumps->is_omp_master) mumps->id.INFO(23) = 0; 1266 if (mumps->petsc_size > 1) { 1267 PetscInt lsol_loc; 1268 PetscScalar *sol_loc; 1269 1270 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr); 1271 1272 /* distributed solution; Create x_seq=sol_loc for repeated use */ 1273 if (mumps->x_seq) { 1274 ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr); 1275 ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr); 1276 ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr); 1277 } 1278 lsol_loc = mumps->id.INFO(23); /* length of sol_loc */ 1279 ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr); 1280 mumps->id.lsol_loc = lsol_loc; 1281 mumps->id.sol_loc = (MumpsScalar*)sol_loc; 1282 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr); 1283 } 1284 ierr = PetscLogFlops(mumps->id.RINFO(2));CHKERRQ(ierr); 1285 PetscFunctionReturn(0); 1286 } 1287 1288 /* Sets MUMPS options from the options database */ 1289 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A) 1290 { 1291 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1292 PetscErrorCode ierr; 1293 PetscInt icntl,info[40],i,ninfo=40; 1294 PetscBool flg; 1295 1296 PetscFunctionBegin; 1297 ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr); 1298 ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr); 1299 if (flg) mumps->id.ICNTL(1) = icntl; 1300 ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr); 1301 if (flg) mumps->id.ICNTL(2) = icntl; 1302 ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr); 1303 if (flg) mumps->id.ICNTL(3) = icntl; 1304 1305 ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr); 1306 if (flg) mumps->id.ICNTL(4) = icntl; 1307 if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */ 1308 1309 ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr); 1310 if (flg) mumps->id.ICNTL(6) = icntl; 1311 1312 ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 
3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
  if (flg) {
    if (icntl == 1 && mumps->petsc_size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order to be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
    else mumps->id.ICNTL(7) = icntl;
  }

  ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
  ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
  if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any Schur data (if any) */
    ierr = MatDestroy(&F->schur);CHKERRQ(ierr);
    ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
  }
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */

  ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can be allocated per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
  if (mumps->id.ICNTL(24)) {
    mumps->id.ICNTL(13) = 1; /* turn off ScaLAPACK to help with the correct detection of null pivots */
  }

  ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computes a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix has been computed","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr); */ /* call MatMumpsGetInverse() directly */
  ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elimination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr); -- not supported by PETSc API */
  ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_35","ICNTL(35): activates Block Low Rank (BLR) based factorization","None",mumps->id.ICNTL(35),&mumps->id.ICNTL(35),NULL);CHKERRQ(ierr);

  ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_7","CNTL(7): dropping parameter used during BLR","None",mumps->id.CNTL(7),&mumps->id.CNTL(7),NULL);CHKERRQ(ierr);

  ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);CHKERRQ(ierr);

  ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
  if (ninfo) {
    if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must be <= 40\n",ninfo);
    ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
    mumps->ninfo = ninfo;
    for (i=0; i<ninfo; i++) {
      if (info[i] < 0 || info[i]>40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must be between 1 and 40\n",info[i]);
      else mumps->info[i] = info[i];
    }
  }

  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
{
  PetscErrorCode ierr;
  PetscInt       nthreads=1;

PetscFunctionBegin; 1381 mumps->petsc_comm = PetscObjectComm((PetscObject)A); 1382 ierr = MPI_Comm_size(mumps->petsc_comm,&mumps->petsc_size);CHKERRQ(ierr); 1383 ierr = MPI_Comm_rank(mumps->petsc_comm,&mumps->myid);CHKERRQ(ierr); /* so that code like "if (!myid)" still works even if mumps_comm is different */ 1384 1385 ierr = PetscOptionsGetInt(NULL,NULL,"-mumps_omp_num_threads",&nthreads,&mumps->use_petsc_omp_support);CHKERRQ(ierr); 1386 if (mumps->use_petsc_omp_support) { 1387 #if defined(PETSC_HAVE_OPENMP_SUPPORT) 1388 ierr = PetscOmpCtrlCreate(mumps->petsc_comm,nthreads,&mumps->omp_ctrl);CHKERRQ(ierr); 1389 ierr = PetscOmpCtrlGetOmpComms(mumps->omp_ctrl,&mumps->omp_comm,&mumps->mumps_comm,&mumps->is_omp_master);CHKERRQ(ierr); 1390 #else 1391 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"the system does not have PETSc OpenMP support but you added the -mumps_omp_num_threads option\n"); 1392 #endif 1393 } else { 1394 mumps->omp_comm = PETSC_COMM_SELF; 1395 mumps->mumps_comm = mumps->petsc_comm; 1396 mumps->is_omp_master = PETSC_TRUE; 1397 } 1398 ierr = MPI_Comm_size(mumps->omp_comm,&mumps->omp_comm_size);CHKERRQ(ierr); 1399 1400 mumps->id.comm_fortran = MPI_Comm_c2f(mumps->mumps_comm); 1401 mumps->id.job = JOB_INIT; 1402 mumps->id.par = 1; /* host participates factorizaton and solve */ 1403 mumps->id.sym = mumps->sym; 1404 1405 PetscMUMPS_c(mumps); 1406 1407 /* copy MUMPS default control values from master to slaves. Although slaves do not call MUMPS, they may access these values in code. 1408 For example, ICNTL(9) is initialized to 1 by MUMPS and slaves check ICNTL(9) in MatSolve_MUMPS. 1409 */ 1410 ierr = MPI_Bcast(mumps->id.icntl,40,MPIU_INT, 0,mumps->omp_comm);CHKERRQ(ierr); /* see MUMPS-5.1.2 Manual Section 9 */ 1411 ierr = MPI_Bcast(mumps->id.cntl, 15,MPIU_REAL,0,mumps->omp_comm);CHKERRQ(ierr); 1412 1413 mumps->scat_rhs = NULL; 1414 mumps->scat_sol = NULL; 1415 1416 /* set PETSc-MUMPS default options - override MUMPS default */ 1417 mumps->id.ICNTL(3) = 0; 1418 mumps->id.ICNTL(4) = 0; 1419 if (mumps->petsc_size == 1) { 1420 mumps->id.ICNTL(18) = 0; /* centralized assembled matrix input */ 1421 } else { 1422 mumps->id.ICNTL(18) = 3; /* distributed assembled matrix input */ 1423 mumps->id.ICNTL(20) = 0; /* rhs is in dense format */ 1424 mumps->id.ICNTL(21) = 1; /* distributed solution */ 1425 } 1426 1427 /* schur */ 1428 mumps->id.size_schur = 0; 1429 mumps->id.listvar_schur = NULL; 1430 mumps->id.schur = NULL; 1431 mumps->sizeredrhs = 0; 1432 mumps->schur_sol = NULL; 1433 mumps->schur_sizesol = 0; 1434 PetscFunctionReturn(0); 1435 } 1436 1437 PetscErrorCode MatFactorSymbolic_MUMPS_ReportIfError(Mat F,Mat A,const MatFactorInfo *info,Mat_MUMPS *mumps) 1438 { 1439 PetscErrorCode ierr; 1440 1441 PetscFunctionBegin; 1442 ierr = MPI_Bcast(mumps->id.infog, 40,MPIU_INT, 0,mumps->omp_comm);CHKERRQ(ierr); /* see MUMPS-5.1.2 manual p82 */ 1443 ierr = MPI_Bcast(mumps->id.rinfog,20,MPIU_REAL,0,mumps->omp_comm);CHKERRQ(ierr); 1444 if (mumps->id.INFOG(1) < 0) { 1445 if (A->erroriffailure) { 1446 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1)); 1447 } else { 1448 if (mumps->id.INFOG(1) == -6) { 1449 ierr = PetscInfo2(F,"matrix is singular in structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1450 F->factorerrortype = MAT_FACTOR_STRUCT_ZEROPIVOT; 1451 } else if (mumps->id.INFOG(1) == -5 || mumps->id.INFOG(1) == -7) { 1452 ierr = PetscInfo2(F,"problem of workspace, INFOG(1)=%d, 
INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1453 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1454 } else { 1455 ierr = PetscInfo2(F,"Error reported by MUMPS in analysis phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1456 F->factorerrortype = MAT_FACTOR_OTHER; 1457 } 1458 } 1459 } 1460 PetscFunctionReturn(0); 1461 } 1462 1463 /* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */ 1464 PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 1465 { 1466 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1467 PetscErrorCode ierr; 1468 Vec b; 1469 IS is_iden; 1470 const PetscInt M = A->rmap->N; 1471 1472 PetscFunctionBegin; 1473 mumps->matstruc = DIFFERENT_NONZERO_PATTERN; 1474 1475 /* Set MUMPS options from the options database */ 1476 ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr); 1477 1478 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1479 ierr = MatMumpsGatherNonzerosOnMaster(MAT_INITIAL_MATRIX,mumps);CHKERRQ(ierr); 1480 1481 /* analysis phase */ 1482 /*----------------*/ 1483 mumps->id.job = JOB_FACTSYMBOLIC; 1484 mumps->id.n = M; 1485 switch (mumps->id.ICNTL(18)) { 1486 case 0: /* centralized assembled matrix input */ 1487 if (!mumps->myid) { 1488 mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn; 1489 if (mumps->id.ICNTL(6)>1) { 1490 mumps->id.a = (MumpsScalar*)mumps->val; 1491 } 1492 if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */ 1493 /* 1494 PetscBool flag; 1495 ierr = ISEqual(r,c,&flag);CHKERRQ(ierr); 1496 if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm"); 1497 ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF); 1498 */ 1499 if (!mumps->myid) { 1500 const PetscInt *idx; 1501 PetscInt i,*perm_in; 1502 1503 ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr); 1504 ierr = ISGetIndices(r,&idx);CHKERRQ(ierr); 1505 1506 mumps->id.perm_in = perm_in; 1507 for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */ 1508 ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr); 1509 } 1510 } 1511 } 1512 break; 1513 case 3: /* distributed assembled matrix input (size>1) */ 1514 mumps->id.nz_loc = mumps->nz; 1515 mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn; 1516 if (mumps->id.ICNTL(6)>1) { 1517 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1518 } 1519 /* MUMPS only supports centralized rhs. 
Create scatter scat_rhs for repeated use in MatSolve() */ 1520 if (!mumps->myid) { 1521 ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr); 1522 ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr); 1523 } else { 1524 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr); 1525 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr); 1526 } 1527 ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr); 1528 ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr); 1529 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 1530 ierr = VecDestroy(&b);CHKERRQ(ierr); 1531 break; 1532 } 1533 PetscMUMPS_c(mumps); 1534 ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr); 1535 1536 F->ops->lufactornumeric = MatFactorNumeric_MUMPS; 1537 F->ops->solve = MatSolve_MUMPS; 1538 F->ops->solvetranspose = MatSolveTranspose_MUMPS; 1539 F->ops->matsolve = MatMatSolve_MUMPS; 1540 F->ops->mattransposesolve = MatMatTransposeSolve_MUMPS; 1541 PetscFunctionReturn(0); 1542 } 1543 1544 /* Note the Petsc r and c permutations are ignored */ 1545 PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 1546 { 1547 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1548 PetscErrorCode ierr; 1549 Vec b; 1550 IS is_iden; 1551 const PetscInt M = A->rmap->N; 1552 1553 PetscFunctionBegin; 1554 mumps->matstruc = DIFFERENT_NONZERO_PATTERN; 1555 1556 /* Set MUMPS options from the options database */ 1557 ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr); 1558 1559 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1560 ierr = MatMumpsGatherNonzerosOnMaster(MAT_INITIAL_MATRIX,mumps);CHKERRQ(ierr); 1561 1562 /* analysis phase */ 1563 /*----------------*/ 1564 mumps->id.job = JOB_FACTSYMBOLIC; 1565 mumps->id.n = M; 1566 switch (mumps->id.ICNTL(18)) { 1567 case 0: /* centralized assembled matrix input */ 1568 if (!mumps->myid) { 1569 mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn; 1570 if (mumps->id.ICNTL(6)>1) { 1571 mumps->id.a = (MumpsScalar*)mumps->val; 1572 } 1573 } 1574 break; 1575 case 3: /* distributed assembled matrix input (size>1) */ 1576 mumps->id.nz_loc = mumps->nz; 1577 mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn; 1578 if (mumps->id.ICNTL(6)>1) { 1579 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1580 } 1581 /* MUMPS only supports centralized rhs. 
Create scatter scat_rhs for repeated use in MatSolve() */ 1582 if (!mumps->myid) { 1583 ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr); 1584 ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr); 1585 } else { 1586 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr); 1587 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr); 1588 } 1589 ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr); 1590 ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr); 1591 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 1592 ierr = VecDestroy(&b);CHKERRQ(ierr); 1593 break; 1594 } 1595 PetscMUMPS_c(mumps); 1596 ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr); 1597 1598 F->ops->lufactornumeric = MatFactorNumeric_MUMPS; 1599 F->ops->solve = MatSolve_MUMPS; 1600 F->ops->solvetranspose = MatSolveTranspose_MUMPS; 1601 PetscFunctionReturn(0); 1602 } 1603 1604 /* Note the Petsc r permutation and factor info are ignored */ 1605 PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info) 1606 { 1607 Mat_MUMPS *mumps = (Mat_MUMPS*)F->data; 1608 PetscErrorCode ierr; 1609 Vec b; 1610 IS is_iden; 1611 const PetscInt M = A->rmap->N; 1612 1613 PetscFunctionBegin; 1614 mumps->matstruc = DIFFERENT_NONZERO_PATTERN; 1615 1616 /* Set MUMPS options from the options database */ 1617 ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr); 1618 1619 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1620 ierr = MatMumpsGatherNonzerosOnMaster(MAT_INITIAL_MATRIX,mumps);CHKERRQ(ierr); 1621 1622 /* analysis phase */ 1623 /*----------------*/ 1624 mumps->id.job = JOB_FACTSYMBOLIC; 1625 mumps->id.n = M; 1626 switch (mumps->id.ICNTL(18)) { 1627 case 0: /* centralized assembled matrix input */ 1628 if (!mumps->myid) { 1629 mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn; 1630 if (mumps->id.ICNTL(6)>1) { 1631 mumps->id.a = (MumpsScalar*)mumps->val; 1632 } 1633 } 1634 break; 1635 case 3: /* distributed assembled matrix input (size>1) */ 1636 mumps->id.nz_loc = mumps->nz; 1637 mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn; 1638 if (mumps->id.ICNTL(6)>1) { 1639 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1640 } 1641 /* MUMPS only supports centralized rhs. 
Create scatter scat_rhs for repeated use in MatSolve() */ 1642 if (!mumps->myid) { 1643 ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr); 1644 ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr); 1645 } else { 1646 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr); 1647 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr); 1648 } 1649 ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr); 1650 ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr); 1651 ierr = ISDestroy(&is_iden);CHKERRQ(ierr); 1652 ierr = VecDestroy(&b);CHKERRQ(ierr); 1653 break; 1654 } 1655 PetscMUMPS_c(mumps); 1656 ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr); 1657 1658 F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS; 1659 F->ops->solve = MatSolve_MUMPS; 1660 F->ops->solvetranspose = MatSolve_MUMPS; 1661 F->ops->matsolve = MatMatSolve_MUMPS; 1662 #if defined(PETSC_USE_COMPLEX) 1663 F->ops->getinertia = NULL; 1664 #else 1665 F->ops->getinertia = MatGetInertia_SBAIJMUMPS; 1666 #endif 1667 PetscFunctionReturn(0); 1668 } 1669 1670 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer) 1671 { 1672 PetscErrorCode ierr; 1673 PetscBool iascii; 1674 PetscViewerFormat format; 1675 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 1676 1677 PetscFunctionBegin; 1678 /* check if matrix is mumps type */ 1679 if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0); 1680 1681 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1682 if (iascii) { 1683 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1684 if (format == PETSC_VIEWER_ASCII_INFO) { 1685 ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr); 1686 ierr = PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",mumps->id.sym);CHKERRQ(ierr); 1687 ierr = PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",mumps->id.par);CHKERRQ(ierr); 1688 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr); 1689 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr); 1690 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr); 1691 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr); 1692 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr); 1693 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr); 1694 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(7) (sequential matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr); 1695 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(8) (scaling strategy): %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr); 1696 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr); 1697 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr); 1698 if (mumps->id.ICNTL(11)>0) { 1699 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(4) (inf norm of input mat): %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr); 1700 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(5) (inf norm of solution): %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr); 1701 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(6) (inf norm of residual): 
%g\n",mumps->id.RINFOG(6));CHKERRQ(ierr); 1702 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr); 1703 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(9) (error estimate): %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr); 1704 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr); 1705 } 1706 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr); 1707 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr); 1708 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr); 1709 /* ICNTL(15-17) not used */ 1710 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr); 1711 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(19) (Schur complement info): %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr); 1712 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr); 1713 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr); 1714 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr); 1715 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr); 1716 1717 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr); 1718 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr); 1719 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr); 1720 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr); 1721 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(28) (use parallel or sequential ordering): %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr); 1722 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(29) (parallel ordering): %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr); 1723 1724 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(30) (user-specified set of entries in inv(A)): %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr); 1725 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(31) (factors is discarded in the solve phase): %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr); 1726 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(33) (compute determinant): %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr); 1727 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(35) (activate BLR based factorization): %d \n",mumps->id.ICNTL(35));CHKERRQ(ierr); 1728 1729 ierr = PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",mumps->id.CNTL(1));CHKERRQ(ierr); 1730 ierr = PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr); 1731 ierr = PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",mumps->id.CNTL(3));CHKERRQ(ierr); 1732 ierr = PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",mumps->id.CNTL(4));CHKERRQ(ierr); 1733 ierr = PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g 
\n",mumps->id.CNTL(5));CHKERRQ(ierr); 1734 ierr = PetscViewerASCIIPrintf(viewer," CNTL(7) (dropping parameter for BLR): %g \n",mumps->id.CNTL(7));CHKERRQ(ierr); 1735 1736 /* infomation local to each processor */ 1737 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr); 1738 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 1739 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr); 1740 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1741 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr); 1742 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr); 1743 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1744 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr); 1745 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr); 1746 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1747 1748 ierr = PetscViewerASCIIPrintf(viewer, " INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr); 1749 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr); 1750 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1751 1752 ierr = PetscViewerASCIIPrintf(viewer, " INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr); 1753 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr); 1754 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1755 1756 ierr = PetscViewerASCIIPrintf(viewer, " INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr); 1757 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr); 1758 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1759 1760 if (mumps->ninfo && mumps->ninfo <= 40){ 1761 PetscInt i; 1762 for (i=0; i<mumps->ninfo; i++){ 1763 ierr = PetscViewerASCIIPrintf(viewer, " INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr); 1764 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr); 1765 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1766 } 1767 } 1768 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 1769 1770 if (!mumps->myid) { /* information from the host */ 1771 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr); 1772 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr); 1773 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr); 1774 ierr = PetscViewerASCIIPrintf(viewer," (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr); 1775 1776 ierr = PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr); 1777 ierr = 
PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr); 1778 ierr = PetscViewerASCIIPrintf(viewer," INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr); 1779 ierr = PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr); 1780 ierr = PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr); 1781 ierr = PetscViewerASCIIPrintf(viewer," INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr); 1782 ierr = PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr); 1783 ierr = PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr); 1784 ierr = PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr); 1785 ierr = PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr); 1786 ierr = PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr); 1787 ierr = PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr); 1788 ierr = PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr); 1789 ierr = PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr); 1790 ierr = PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr); 1791 ierr = PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr); 1792 ierr = PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr); 1793 ierr = PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr); 1794 ierr = PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr); 1795 ierr = PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr); 1796 ierr = PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr); 1797 ierr = PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr); 1798 ierr = PetscViewerASCIIPrintf(viewer," INFOG(25) (after 
factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr); 1799 ierr = PetscViewerASCIIPrintf(viewer," INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr); 1800 ierr = PetscViewerASCIIPrintf(viewer," INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr); 1801 ierr = PetscViewerASCIIPrintf(viewer," INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr); 1802 ierr = PetscViewerASCIIPrintf(viewer," INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr); 1803 ierr = PetscViewerASCIIPrintf(viewer," INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr); 1804 ierr = PetscViewerASCIIPrintf(viewer," INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr); 1805 } 1806 } 1807 } 1808 PetscFunctionReturn(0); 1809 } 1810 1811 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info) 1812 { 1813 Mat_MUMPS *mumps =(Mat_MUMPS*)A->data; 1814 1815 PetscFunctionBegin; 1816 info->block_size = 1.0; 1817 info->nz_allocated = mumps->id.INFOG(20); 1818 info->nz_used = mumps->id.INFOG(20); 1819 info->nz_unneeded = 0.0; 1820 info->assemblies = 0.0; 1821 info->mallocs = 0.0; 1822 info->memory = 0.0; 1823 info->fill_ratio_given = 0; 1824 info->fill_ratio_needed = 0; 1825 info->factor_mallocs = 0; 1826 PetscFunctionReturn(0); 1827 } 1828 1829 /* -------------------------------------------------------------------------------------------*/ 1830 PetscErrorCode MatFactorSetSchurIS_MUMPS(Mat F, IS is) 1831 { 1832 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1833 const PetscInt *idxs; 1834 PetscInt size,i; 1835 PetscErrorCode ierr; 1836 1837 PetscFunctionBegin; 1838 ierr = ISGetLocalSize(is,&size);CHKERRQ(ierr); 1839 if (mumps->petsc_size > 1) { 1840 PetscBool ls,gs; /* gs is false if any rank other than root has non-empty IS */ 1841 1842 ls = mumps->myid ? (size ? 
PETSC_FALSE : PETSC_TRUE) : PETSC_TRUE; /* always true on root; false on others if their size != 0 */ 1843 ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,mumps->petsc_comm);CHKERRQ(ierr); 1844 if (!gs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS distributed parallel Schur complements not yet supported by PETSc\n"); 1845 } 1846 if (mumps->id.size_schur != size) { 1847 ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr); 1848 mumps->id.size_schur = size; 1849 mumps->id.schur_lld = size; 1850 ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr); 1851 } 1852 1853 /* Schur complement matrix */ 1854 ierr = MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&F->schur);CHKERRQ(ierr); 1855 if (mumps->sym == 1) { 1856 ierr = MatSetOption(F->schur,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr); 1857 } 1858 1859 /* MUMPS expects Fortran style indices */ 1860 ierr = ISGetIndices(is,&idxs);CHKERRQ(ierr); 1861 ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr); 1862 for (i=0;i<size;i++) mumps->id.listvar_schur[i]++; 1863 ierr = ISRestoreIndices(is,&idxs);CHKERRQ(ierr); 1864 if (mumps->petsc_size > 1) { 1865 mumps->id.ICNTL(19) = 1; /* MUMPS returns Schur centralized on the host */ 1866 } else { 1867 if (F->factortype == MAT_FACTOR_LU) { 1868 mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */ 1869 } else { 1870 mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */ 1871 } 1872 } 1873 /* set a special value of ICNTL (not handled by MUMPS) to be used in the solve phase by PETSc */ 1874 mumps->id.ICNTL(26) = -1; 1875 PetscFunctionReturn(0); 1876 } 1877 1878 /* -------------------------------------------------------------------------------------------*/ 1879 PetscErrorCode MatFactorCreateSchurComplement_MUMPS(Mat F,Mat* S) 1880 { 1881 Mat St; 1882 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1883 PetscScalar *array; 1884 #if defined(PETSC_USE_COMPLEX) 1885 PetscScalar im = PetscSqrtScalar((PetscScalar)-1.0); 1886 #endif 1887 PetscErrorCode ierr; 1888 1889 PetscFunctionBegin; 1890 if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected!
You should call MatFactorSetSchurIS to enable it"); 1891 ierr = MatCreate(PETSC_COMM_SELF,&St);CHKERRQ(ierr); 1892 ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr); 1893 ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr); 1894 ierr = MatSetUp(St);CHKERRQ(ierr); 1895 ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr); 1896 if (!mumps->sym) { /* MUMPS always return a full matrix */ 1897 if (mumps->id.ICNTL(19) == 1) { /* stored by rows */ 1898 PetscInt i,j,N=mumps->id.size_schur; 1899 for (i=0;i<N;i++) { 1900 for (j=0;j<N;j++) { 1901 #if !defined(PETSC_USE_COMPLEX) 1902 PetscScalar val = mumps->id.schur[i*N+j]; 1903 #else 1904 PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i; 1905 #endif 1906 array[j*N+i] = val; 1907 } 1908 } 1909 } else { /* stored by columns */ 1910 ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr); 1911 } 1912 } else { /* either full or lower-triangular (not packed) */ 1913 if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */ 1914 PetscInt i,j,N=mumps->id.size_schur; 1915 for (i=0;i<N;i++) { 1916 for (j=i;j<N;j++) { 1917 #if !defined(PETSC_USE_COMPLEX) 1918 PetscScalar val = mumps->id.schur[i*N+j]; 1919 #else 1920 PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i; 1921 #endif 1922 array[i*N+j] = val; 1923 array[j*N+i] = val; 1924 } 1925 } 1926 } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */ 1927 ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr); 1928 } else { /* ICNTL(19) == 1 lower triangular stored by rows */ 1929 PetscInt i,j,N=mumps->id.size_schur; 1930 for (i=0;i<N;i++) { 1931 for (j=0;j<i+1;j++) { 1932 #if !defined(PETSC_USE_COMPLEX) 1933 PetscScalar val = mumps->id.schur[i*N+j]; 1934 #else 1935 PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i; 1936 #endif 1937 array[i*N+j] = val; 1938 array[j*N+i] = val; 1939 } 1940 } 1941 } 1942 } 1943 ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr); 1944 *S = St; 1945 PetscFunctionReturn(0); 1946 } 1947 1948 /* -------------------------------------------------------------------------------------------*/ 1949 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival) 1950 { 1951 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1952 1953 PetscFunctionBegin; 1954 mumps->id.ICNTL(icntl) = ival; 1955 PetscFunctionReturn(0); 1956 } 1957 1958 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival) 1959 { 1960 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1961 1962 PetscFunctionBegin; 1963 *ival = mumps->id.ICNTL(icntl); 1964 PetscFunctionReturn(0); 1965 } 1966 1967 /*@ 1968 MatMumpsSetIcntl - Set MUMPS parameter ICNTL() 1969 1970 Logically Collective on Mat 1971 1972 Input Parameters: 1973 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 1974 . icntl - index of MUMPS parameter array ICNTL() 1975 - ival - value of MUMPS ICNTL(icntl) 1976 1977 Options Database: 1978 . -mat_mumps_icntl_<icntl> <ival> 1979 1980 Level: beginner 1981 1982 References: 1983 . 
MUMPS Users' Guide 1984 1985 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 1986 @*/ 1987 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival) 1988 { 1989 PetscErrorCode ierr; 1990 1991 PetscFunctionBegin; 1992 PetscValidType(F,1); 1993 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 1994 PetscValidLogicalCollectiveInt(F,icntl,2); 1995 PetscValidLogicalCollectiveInt(F,ival,3); 1996 ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr); 1997 PetscFunctionReturn(0); 1998 } 1999 2000 /*@ 2001 MatMumpsGetIcntl - Get MUMPS parameter ICNTL() 2002 2003 Logically Collective on Mat 2004 2005 Input Parameters: 2006 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2007 - icntl - index of MUMPS parameter array ICNTL() 2008 2009 Output Parameter: 2010 . ival - value of MUMPS ICNTL(icntl) 2011 2012 Level: beginner 2013 2014 References: 2015 . MUMPS Users' Guide 2016 2017 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2018 @*/ 2019 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival) 2020 { 2021 PetscErrorCode ierr; 2022 2023 PetscFunctionBegin; 2024 PetscValidType(F,1); 2025 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2026 PetscValidLogicalCollectiveInt(F,icntl,2); 2027 PetscValidIntPointer(ival,3); 2028 ierr = PetscUseMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 2029 PetscFunctionReturn(0); 2030 } 2031 2032 /* -------------------------------------------------------------------------------------------*/ 2033 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val) 2034 { 2035 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2036 2037 PetscFunctionBegin; 2038 mumps->id.CNTL(icntl) = val; 2039 PetscFunctionReturn(0); 2040 } 2041 2042 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val) 2043 { 2044 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2045 2046 PetscFunctionBegin; 2047 *val = mumps->id.CNTL(icntl); 2048 PetscFunctionReturn(0); 2049 } 2050 2051 /*@ 2052 MatMumpsSetCntl - Set MUMPS parameter CNTL() 2053 2054 Logically Collective on Mat 2055 2056 Input Parameters: 2057 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2058 . icntl - index of MUMPS parameter array CNTL() 2059 - val - value of MUMPS CNTL(icntl) 2060 2061 Options Database: 2062 . -mat_mumps_cntl_<icntl> <val> 2063 2064 Level: beginner 2065 2066 References: 2067 . 
MUMPS Users' Guide 2068 2069 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2070 @*/ 2071 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val) 2072 { 2073 PetscErrorCode ierr; 2074 2075 PetscFunctionBegin; 2076 PetscValidType(F,1); 2077 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2078 PetscValidLogicalCollectiveInt(F,icntl,2); 2079 PetscValidLogicalCollectiveReal(F,val,3); 2080 ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr); 2081 PetscFunctionReturn(0); 2082 } 2083 2084 /*@ 2085 MatMumpsGetCntl - Get MUMPS parameter CNTL() 2086 2087 Logically Collective on Mat 2088 2089 Input Parameters: 2090 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2091 - icntl - index of MUMPS parameter array CNTL() 2092 2093 Output Parameter: 2094 . val - value of MUMPS CNTL(icntl) 2095 2096 Level: beginner 2097 2098 References: 2099 . MUMPS Users' Guide 2100 2101 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2102 @*/ 2103 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val) 2104 { 2105 PetscErrorCode ierr; 2106 2107 PetscFunctionBegin; 2108 PetscValidType(F,1); 2109 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2110 PetscValidLogicalCollectiveInt(F,icntl,2); 2111 PetscValidRealPointer(val,3); 2112 ierr = PetscUseMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 2113 PetscFunctionReturn(0); 2114 } 2115 2116 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info) 2117 { 2118 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2119 2120 PetscFunctionBegin; 2121 *info = mumps->id.INFO(icntl); 2122 PetscFunctionReturn(0); 2123 } 2124 2125 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog) 2126 { 2127 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2128 2129 PetscFunctionBegin; 2130 *infog = mumps->id.INFOG(icntl); 2131 PetscFunctionReturn(0); 2132 } 2133 2134 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo) 2135 { 2136 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2137 2138 PetscFunctionBegin; 2139 *rinfo = mumps->id.RINFO(icntl); 2140 PetscFunctionReturn(0); 2141 } 2142 2143 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog) 2144 { 2145 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2146 2147 PetscFunctionBegin; 2148 *rinfog = mumps->id.RINFOG(icntl); 2149 PetscFunctionReturn(0); 2150 } 2151 2152 PetscErrorCode MatMumpsGetInverse_MUMPS(Mat F,Mat spRHS) 2153 { 2154 PetscErrorCode ierr; 2155 Mat Bt = NULL,Btseq = NULL; 2156 PetscBool flg; 2157 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2158 PetscScalar *aa; 2159 PetscInt spnr,*ia,*ja; 2160 2161 PetscFunctionBegin; 2162 PetscValidIntPointer(spRHS,2); 2163 ierr = PetscObjectTypeCompare((PetscObject)spRHS,MATTRANSPOSEMAT,&flg);CHKERRQ(ierr); 2164 if (flg) { 2165 ierr = MatTransposeGetMat(spRHS,&Bt);CHKERRQ(ierr); 2166 } else SETERRQ(PetscObjectComm((PetscObject)spRHS),PETSC_ERR_ARG_WRONG,"Matrix spRHS must be type MATTRANSPOSEMAT matrix"); 2167 2168 ierr = MatMumpsSetIcntl(F,30,1);CHKERRQ(ierr); 2169 2170 if (mumps->petsc_size > 1) { 2171 Mat_MPIAIJ *b = 
(Mat_MPIAIJ*)Bt->data; 2172 Btseq = b->A; 2173 } else { 2174 Btseq = Bt; 2175 } 2176 2177 if (!mumps->myid) { 2178 ierr = MatSeqAIJGetArray(Btseq,&aa);CHKERRQ(ierr); 2179 ierr = MatGetRowIJ(Btseq,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr); 2180 if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure"); 2181 2182 mumps->id.irhs_ptr = ia; 2183 mumps->id.irhs_sparse = ja; 2184 mumps->id.nz_rhs = ia[spnr] - 1; 2185 mumps->id.rhs_sparse = (MumpsScalar*)aa; 2186 } else { 2187 mumps->id.irhs_ptr = NULL; 2188 mumps->id.irhs_sparse = NULL; 2189 mumps->id.nz_rhs = 0; 2190 mumps->id.rhs_sparse = NULL; 2191 } 2192 mumps->id.ICNTL(20) = 1; /* rhs is sparse */ 2193 mumps->id.ICNTL(21) = 0; /* solution is in assembled centralized format */ 2194 2195 /* solve phase */ 2196 /*-------------*/ 2197 mumps->id.job = JOB_SOLVE; 2198 PetscMUMPS_c(mumps); 2199 if (mumps->id.INFOG(1) < 0) 2200 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2)); 2201 2202 if (!mumps->myid) { 2203 ierr = MatSeqAIJRestoreArray(Btseq,&aa);CHKERRQ(ierr); 2204 ierr = MatRestoreRowIJ(Btseq,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&flg);CHKERRQ(ierr); 2205 if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot get IJ structure"); 2206 } 2207 PetscFunctionReturn(0); 2208 } 2209 2210 /*@ 2211 MatMumpsGetInverse - Get user-specified set of entries in inverse of A 2212 2213 Logically Collective on Mat 2214 2215 Input Parameters: 2216 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2217 - spRHS - sequential sparse matrix in MATTRANSPOSEMAT format holding specified indices in processor[0] 2218 2219 Output Parameter: 2220 . spRHS - requested entries of inverse of A 2221 2222 Level: beginner 2223 2224 References: 2225 . MUMPS Users' Guide 2226 2227 .seealso: MatGetFactor(), MatCreateTranspose() 2228 @*/ 2229 PetscErrorCode MatMumpsGetInverse(Mat F,Mat spRHS) 2230 { 2231 PetscErrorCode ierr; 2232 2233 PetscFunctionBegin; 2234 PetscValidType(F,1); 2235 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2236 ierr = PetscUseMethod(F,"MatMumpsGetInverse_C",(Mat,Mat),(F,spRHS));CHKERRQ(ierr); 2237 PetscFunctionReturn(0); 2238 } 2239 2240 PetscErrorCode MatMumpsGetInverseTranspose_MUMPS(Mat F,Mat spRHST) 2241 { 2242 PetscErrorCode ierr; 2243 Mat spRHS; 2244 2245 PetscFunctionBegin; 2246 ierr = MatCreateTranspose(spRHST,&spRHS);CHKERRQ(ierr); 2247 ierr = MatMumpsGetInverse_MUMPS(F,spRHS);CHKERRQ(ierr); 2248 ierr = MatDestroy(&spRHS);CHKERRQ(ierr); 2249 PetscFunctionReturn(0); 2250 } 2251 2252 /*@ 2253 MatMumpsGetInverseTranspose - Get user-specified set of entries in inverse of matrix A^T 2254 2255 Logically Collective on Mat 2256 2257 Input Parameters: 2258 + F - the factored matrix of A obtained by calling MatGetFactor() from PETSc-MUMPS interface 2259 - spRHST - sequential sparse matrix in MATAIJ format holding specified indices of A^T in processor[0] 2260 2261 Output Parameter: 2262 . spRHST - requested entries of inverse of A^T 2263 2264 Level: beginner 2265 2266 References: 2267 . 
MUMPS Users' Guide 2268 2269 .seealso: MatGetFactor(), MatCreateTranspose(), MatMumpsGetInverse() 2270 @*/ 2271 PetscErrorCode MatMumpsGetInverseTranspose(Mat F,Mat spRHST) 2272 { 2273 PetscErrorCode ierr; 2274 PetscBool flg; 2275 2276 PetscFunctionBegin; 2277 PetscValidType(F,1); 2278 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2279 ierr = PetscObjectTypeCompareAny((PetscObject)spRHST,&flg,MATSEQAIJ,MATMPIAIJ,NULL);CHKERRQ(ierr); 2280 if (!flg) SETERRQ(PetscObjectComm((PetscObject)spRHST),PETSC_ERR_ARG_WRONG,"Matrix spRHST must be MATAIJ matrix"); 2281 2282 ierr = PetscUseMethod(F,"MatMumpsGetInverseTranspose_C",(Mat,Mat),(F,spRHST));CHKERRQ(ierr); 2283 PetscFunctionReturn(0); 2284 } 2285 2286 /*@ 2287 MatMumpsGetInfo - Get MUMPS parameter INFO() 2288 2289 Logically Collective on Mat 2290 2291 Input Parameters: 2292 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2293 - icntl - index of MUMPS parameter array INFO() 2294 2295 Output Parameter: 2296 . ival - value of MUMPS INFO(icntl) 2297 2298 Level: beginner 2299 2300 References: 2301 . MUMPS Users' Guide 2302 2303 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2304 @*/ 2305 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival) 2306 { 2307 PetscErrorCode ierr; 2308 2309 PetscFunctionBegin; 2310 PetscValidType(F,1); 2311 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2312 PetscValidIntPointer(ival,3); 2313 ierr = PetscUseMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 2314 PetscFunctionReturn(0); 2315 } 2316 2317 /*@ 2318 MatMumpsGetInfog - Get MUMPS parameter INFOG() 2319 2320 Logically Collective on Mat 2321 2322 Input Parameters: 2323 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2324 - icntl - index of MUMPS parameter array INFOG() 2325 2326 Output Parameter: 2327 . ival - value of MUMPS INFOG(icntl) 2328 2329 Level: beginner 2330 2331 References: 2332 . MUMPS Users' Guide 2333 2334 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2335 @*/ 2336 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival) 2337 { 2338 PetscErrorCode ierr; 2339 2340 PetscFunctionBegin; 2341 PetscValidType(F,1); 2342 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2343 PetscValidIntPointer(ival,3); 2344 ierr = PetscUseMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 2345 PetscFunctionReturn(0); 2346 } 2347 2348 /*@ 2349 MatMumpsGetRinfo - Get MUMPS parameter RINFO() 2350 2351 Logically Collective on Mat 2352 2353 Input Parameters: 2354 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2355 - icntl - index of MUMPS parameter array RINFO() 2356 2357 Output Parameter: 2358 . val - value of MUMPS RINFO(icntl) 2359 2360 Level: beginner 2361 2362 References: 2363 . 
MUMPS Users' Guide 2364 2365 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2366 @*/ 2367 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val) 2368 { 2369 PetscErrorCode ierr; 2370 2371 PetscFunctionBegin; 2372 PetscValidType(F,1); 2373 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2374 PetscValidRealPointer(val,3); 2375 ierr = PetscUseMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 2376 PetscFunctionReturn(0); 2377 } 2378 2379 /*@ 2380 MatMumpsGetRinfog - Get MUMPS parameter RINFOG() 2381 2382 Logically Collective on Mat 2383 2384 Input Parameters: 2385 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2386 - icntl - index of MUMPS parameter array RINFOG() 2387 2388 Output Parameter: 2389 . val - value of MUMPS RINFOG(icntl) 2390 2391 Level: beginner 2392 2393 References: 2394 . MUMPS Users' Guide 2395 2396 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2397 @*/ 2398 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val) 2399 { 2400 PetscErrorCode ierr; 2401 2402 PetscFunctionBegin; 2403 PetscValidType(F,1); 2404 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2405 PetscValidRealPointer(val,3); 2406 ierr = PetscUseMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 2407 PetscFunctionReturn(0); 2408 } 2409 2410 /*MC 2411 MATSOLVERMUMPS - A matrix type providing direct solvers (LU and Cholesky) for 2412 distributed and sequential matrices via the external package MUMPS. 2413 2414 Works with MATAIJ and MATSBAIJ matrices 2415 2416 Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch to have PETSc installed with MUMPS 2417 2418 Use -pc_type cholesky or lu -pc_factor_mat_solver_type mumps to use this direct solver 2419 2420 Options Database Keys: 2421 + -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages 2422 . -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warning 2423 . -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host 2424 . -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4) 2425 . -mat_mumps_icntl_6 - ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) 2426 . -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis 2427 . -mat_mumps_icntl_8 - ICNTL(8): scaling strategy (-2 to 8 or 77) 2428 . -mat_mumps_icntl_10 - ICNTL(10): max num of refinements 2429 . -mat_mumps_icntl_11 - ICNTL(11): statistics related to an error analysis (via -ksp_view) 2430 . -mat_mumps_icntl_12 - ICNTL(12): an ordering strategy for symmetric matrices (0 to 3) 2431 . -mat_mumps_icntl_13 - ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting 2432 . -mat_mumps_icntl_14 - ICNTL(14): percentage increase in the estimated working space 2433 . -mat_mumps_icntl_19 - ICNTL(19): computes the Schur complement 2434 . -mat_mumps_icntl_22 - ICNTL(22): in-core/out-of-core factorization and solve (0 or 1) 2435 . 
-mat_mumps_icntl_23 - ICNTL(23): max size of the working memory (MB) that can be allocated per processor 2436 . -mat_mumps_icntl_24 - ICNTL(24): detection of null pivot rows (0 or 1) 2437 . -mat_mumps_icntl_25 - ICNTL(25): compute a solution of a deficient matrix and a null space basis 2438 . -mat_mumps_icntl_26 - ICNTL(26): drives the solution phase if a Schur complement matrix is present 2439 . -mat_mumps_icntl_28 - ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering 2440 . -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis 2441 . -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A) 2442 . -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization 2443 . -mat_mumps_icntl_33 - ICNTL(33): compute determinant 2444 . -mat_mumps_cntl_1 - CNTL(1): relative pivoting threshold 2445 . -mat_mumps_cntl_2 - CNTL(2): stopping criterion of refinement 2446 . -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold 2447 . -mat_mumps_cntl_4 - CNTL(4): value for static pivoting 2448 - -mat_mumps_cntl_5 - CNTL(5): fixation for null pivots 2449 2450 Level: beginner 2451 2452 Notes: 2453 When a MUMPS factorization fails inside a KSP solve, for example with a KSP_DIVERGED_PCSETUP_FAILED, one can find the MUMPS information about the failure by calling 2454 $ KSPGetPC(ksp,&pc); 2455 $ PCFactorGetMatrix(pc,&mat); 2456 $ MatMumpsGetInfo(mat,....); 2457 $ MatMumpsGetInfog(mat,....); etc. 2458 Or you can run with -ksp_error_if_not_converged and the program will be stopped and the information printed in the error message. 2459 2460 .seealso: PCFactorSetMatSolverType(), MatSolverType, MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog(), KSPGetPC(), PCGetFactor(), PCFactorGetMatrix() 2461 2462 M*/ 2463 2464 static PetscErrorCode MatFactorGetSolverType_mumps(Mat A,MatSolverType *type) 2465 { 2466 PetscFunctionBegin; 2467 *type = MATSOLVERMUMPS; 2468 PetscFunctionReturn(0); 2469 } 2470 2471 /* MatGetFactor for Seq and MPI AIJ matrices */ 2472 static PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F) 2473 { 2474 Mat B; 2475 PetscErrorCode ierr; 2476 Mat_MUMPS *mumps; 2477 PetscBool isSeqAIJ; 2478 2479 PetscFunctionBegin; 2480 /* Create the factorization matrix */ 2481 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr); 2482 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2483 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2484 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2485 ierr = MatSetUp(B);CHKERRQ(ierr); 2486 2487 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2488 2489 B->ops->view = MatView_MUMPS; 2490 B->ops->getinfo = MatGetInfo_MUMPS; 2491 2492 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr); 2493 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2494 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2495 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2496 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
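/* These composed methods back the public MatMumpsSetIcntl()/MatMumpsGetIcntl(), MatMumpsSetCntl()/MatMumpsGetCntl(), MatMumpsGetInfo()/MatMumpsGetInfog(), MatMumpsGetRinfo()/MatMumpsGetRinfog() and MatMumpsGetInverse*() wrappers defined above, which dispatch through PetscTryMethod()/PetscUseMethod(); composing them on the factor matrix B is what makes those calls usable on any matrix obtained from MatGetFactor() with MATSOLVERMUMPS. */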
2497 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2498 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2499 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2500 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2501 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2502 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2503 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr); 2504 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverseTranspose_C",MatMumpsGetInverseTranspose_MUMPS);CHKERRQ(ierr); 2505 2506 if (ftype == MAT_FACTOR_LU) { 2507 B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS; 2508 B->factortype = MAT_FACTOR_LU; 2509 if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij; 2510 else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij; 2511 mumps->sym = 0; 2512 } else { 2513 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 2514 B->factortype = MAT_FACTOR_CHOLESKY; 2515 if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij; 2516 else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij; 2517 #if defined(PETSC_USE_COMPLEX) 2518 mumps->sym = 2; 2519 #else 2520 if (A->spd_set && A->spd) mumps->sym = 1; 2521 else mumps->sym = 2; 2522 #endif 2523 } 2524 2525 /* set solvertype */ 2526 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2527 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2528 2529 B->ops->destroy = MatDestroy_MUMPS; 2530 B->data = (void*)mumps; 2531 2532 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2533 2534 *F = B; 2535 PetscFunctionReturn(0); 2536 } 2537 2538 /* MatGetFactor for Seq and MPI SBAIJ matrices */ 2539 static PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F) 2540 { 2541 Mat B; 2542 PetscErrorCode ierr; 2543 Mat_MUMPS *mumps; 2544 PetscBool isSeqSBAIJ; 2545 2546 PetscFunctionBegin; 2547 if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix"); 2548 if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead"); 2549 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr); 2550 /* Create the factorization matrix */ 2551 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2552 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2553 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2554 ierr = MatSetUp(B);CHKERRQ(ierr); 2555 2556 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2557 if (isSeqSBAIJ) { 2558 mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij; 2559 } else { 2560 mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij; 2561 } 2562 2563 B->ops->getinfo = MatGetInfo_External; 2564 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 2565 B->ops->view = MatView_MUMPS; 2566 2567 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr); 2568 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2569 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2570 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2571 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2572 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2573 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2574 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2575 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2576 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2577 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2578 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr); 2579 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverseTranspose_C",MatMumpsGetInverseTranspose_MUMPS);CHKERRQ(ierr); 2580 2581 B->factortype = MAT_FACTOR_CHOLESKY; 2582 #if defined(PETSC_USE_COMPLEX) 2583 mumps->sym = 2; 2584 #else 2585 if (A->spd_set && A->spd) mumps->sym = 1; 2586 else mumps->sym = 2; 2587 #endif 2588 2589 /* set solvertype */ 2590 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2591 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2592 2593 B->ops->destroy = MatDestroy_MUMPS; 2594 B->data = (void*)mumps; 2595 2596 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2597 2598 *F = B; 2599 PetscFunctionReturn(0); 2600 } 2601 2602 static PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F) 2603 { 2604 Mat B; 2605 PetscErrorCode ierr; 2606 Mat_MUMPS *mumps; 2607 PetscBool isSeqBAIJ; 2608 2609 PetscFunctionBegin; 2610 /* Create the factorization matrix */ 2611 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr); 2612 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2613 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2614 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2615 ierr = MatSetUp(B);CHKERRQ(ierr); 2616 2617 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2618 if (ftype == MAT_FACTOR_LU) { 2619 B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS; 2620 B->factortype = MAT_FACTOR_LU; 2621 if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij; 2622 else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij; 2623 mumps->sym = 0; 2624 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n"); 2625 2626 B->ops->getinfo = MatGetInfo_External; 2627 B->ops->view = MatView_MUMPS; 2628 2629 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr); 2630 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2631 
ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2632 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2633 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2634 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2635 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2636 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2637 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2638 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2639 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2640 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverse_C",MatMumpsGetInverse_MUMPS);CHKERRQ(ierr); 2641 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInverseTranspose_C",MatMumpsGetInverseTranspose_MUMPS);CHKERRQ(ierr); 2642 2643 /* set solvertype */ 2644 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2645 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2646 2647 B->ops->destroy = MatDestroy_MUMPS; 2648 B->data = (void*)mumps; 2649 2650 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2651 2652 *F = B; 2653 PetscFunctionReturn(0); 2654 } 2655 2656 /* MatGetFactor for Seq and MPI SELL matrices */ 2657 static PetscErrorCode MatGetFactor_sell_mumps(Mat A,MatFactorType ftype,Mat *F) 2658 { 2659 Mat B; 2660 PetscErrorCode ierr; 2661 Mat_MUMPS *mumps; 2662 PetscBool isSeqSELL; 2663 2664 PetscFunctionBegin; 2665 /* Create the factorization matrix */ 2666 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSELL,&isSeqSELL);CHKERRQ(ierr); 2667 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2668 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2669 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2670 ierr = MatSetUp(B);CHKERRQ(ierr); 2671 2672 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2673 2674 B->ops->view = MatView_MUMPS; 2675 B->ops->getinfo = MatGetInfo_MUMPS; 2676 2677 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mumps);CHKERRQ(ierr); 2678 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2679 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2680 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2681 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2682 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2683 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2684 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2685 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2686 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2687 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2688 2689 if (ftype == MAT_FACTOR_LU) { 2690 B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS; 2691 B->factortype = MAT_FACTOR_LU; 2692 if (isSeqSELL) mumps->ConvertToTriples = MatConvertToTriples_seqsell_seqaij; 2693 else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented"); 2694 mumps->sym = 0; 2695 } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented"); 2696 2697 /* set solvertype */ 2698 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2699 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2700 2701 B->ops->destroy = MatDestroy_MUMPS; 2702 B->data = (void*)mumps; 2703 2704 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2705 2706 *F = B; 2707 PetscFunctionReturn(0); 2708 } 2709 2710 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void) 2711 { 2712 PetscErrorCode ierr; 2713 2714 PetscFunctionBegin; 2715 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2716 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2717 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2718 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2719 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 2720 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2721 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2722 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2723 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2724 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 2725 ierr = MatSolverTypeRegister(MATSOLVERMUMPS,MATSEQSELL,MAT_FACTOR_LU,MatGetFactor_sell_mumps);CHKERRQ(ierr); 2726 PetscFunctionReturn(0); 2727 } 2728 2729 #undef PETSC_HAVE_OPENMP_SUPPORT 2730 2731