1 2 /* 3 Provides an interface to the MUMPS sparse solver 4 */ 5 6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/ 7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h> 8 #include <petscblaslapack.h> 9 10 EXTERN_C_BEGIN 11 #if defined(PETSC_USE_COMPLEX) 12 #if defined(PETSC_USE_REAL_SINGLE) 13 #include <cmumps_c.h> 14 #else 15 #include <zmumps_c.h> 16 #endif 17 #else 18 #if defined(PETSC_USE_REAL_SINGLE) 19 #include <smumps_c.h> 20 #else 21 #include <dmumps_c.h> 22 #endif 23 #endif 24 EXTERN_C_END 25 #define JOB_INIT -1 26 #define JOB_FACTSYMBOLIC 1 27 #define JOB_FACTNUMERIC 2 28 #define JOB_SOLVE 3 29 #define JOB_END -2 30 31 /* calls to MUMPS */ 32 #if defined(PETSC_USE_COMPLEX) 33 #if defined(PETSC_USE_REAL_SINGLE) 34 #define PetscMUMPS_c cmumps_c 35 #else 36 #define PetscMUMPS_c zmumps_c 37 #endif 38 #else 39 #if defined(PETSC_USE_REAL_SINGLE) 40 #define PetscMUMPS_c smumps_c 41 #else 42 #define PetscMUMPS_c dmumps_c 43 #endif 44 #endif 45 46 /* declare MumpsScalar */ 47 #if defined(PETSC_USE_COMPLEX) 48 #if defined(PETSC_USE_REAL_SINGLE) 49 #define MumpsScalar mumps_complex 50 #else 51 #define MumpsScalar mumps_double_complex 52 #endif 53 #else 54 #define MumpsScalar PetscScalar 55 #endif 56 57 /* macros s.t. 
indices match MUMPS documentation */
#define ICNTL(I) icntl[(I)-1]
#define CNTL(I) cntl[(I)-1]
#define INFOG(I) infog[(I)-1]
#define INFO(I) info[(I)-1]
#define RINFOG(I) rinfog[(I)-1]
#define RINFO(I) rinfo[(I)-1]

/*
   Mat_MUMPS - private context for the MUMPS factorization interface.

   Wraps the precision/scalar-specific MUMPS control structure (id) together with
   the PETSc-side state that must persist across the analysis, numeric
   factorization and solve phases.
*/
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;
#else
  ZMUMPS_STRUC_C id;
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;
#else
  DMUMPS_STRUC_C id;
#endif
#endif

  MatStructure matstruc;
  PetscMPIInt  myid,size;          /* rank and size of comm_mumps */
  PetscInt     *irn,*jcn,nz,sym;   /* assembled (row,col) triples and local nonzero count passed to MUMPS */
  PetscScalar  *val;               /* values of the triples; sometimes aliases the tail of the irn allocation */
  MPI_Comm     comm_mumps;
  PetscBool    isAIJ;
  PetscInt     ICNTL9_pre;         /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol; /* used by MatSolve() */
  Vec          b_seq,x_seq;
  PetscInt     ninfo,*info;        /* display INFO */
  PetscInt     sizeredrhs;         /* current capacity (in entries) of id.redrhs */
  PetscInt     *schur_pivots;      /* pivots produced by GETRF/SYTRF on the Schur complement */
  PetscInt     schur_B_lwork;      /* LAPACK workspace size for the Schur factorization/inversion */
  PetscScalar  *schur_work;        /* LAPACK workspace */
  PetscScalar  *schur_sol;         /* scratch solution array for Schur solves */
  PetscInt     schur_sizesol;      /* current capacity (in entries) of schur_sol */
  PetscBool    schur_factored;     /* the Schur complement has been factored with LAPACK */
  PetscBool    schur_inverted;     /* the Schur complement has been explicitly inverted */
  PetscInt     schur_sym;          /* symmetry flag used to choose SYTRF (==2) vs POTRF */

  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);
} Mat_MUMPS;

extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);

#undef __FUNCT__
#define __FUNCT__ "MatMumpsResetSchur_Private"
/*
   Free all Schur-complement related storage and disable Schur computation
   (ICNTL(19) = 0).

   NOTE(review): schur_factored/schur_inverted are not reset here -- confirm
   that every caller re-establishes those flags before reusing the context.
*/
static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
  ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
  ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
  ierr = PetscFree(mumps->schur_pivots);CHKERRQ(ierr);
  ierr = PetscFree(mumps->schur_work);CHKERRQ(ierr);
  mumps->id.size_schur = 0;
  mumps->id.ICNTL(19) = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMumpsFactorSchur_Private"
/*
   Factor (in place) the Schur complement that MUMPS returned in id.schur.

   Non-symmetric matrix: LU via GETRF, pivots kept in schur_pivots.
   Symmetric matrix:     Bunch-Kaufman via SYTRF when schur_sym == 2 (indefinite),
                         Cholesky via POTRF otherwise.
   The UPLO character follows ICNTL(19): MUMPS stores the lower triangle by
   columns for ICNTL(19) == 2 or 3 ("L"); for ICNTL(19) == 1 it is stored by
   rows, i.e. the upper triangle by columns ("U").
   Idempotent: returns immediately once schur_factored has been set.
*/
static PetscErrorCode MatMumpsFactorSchur_Private(Mat_MUMPS* mumps)
{
  PetscBLASInt   B_N,B_ierr,B_slda;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->schur_factored) {
    PetscFunctionReturn(0);
  }
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
    if (!mumps->schur_pivots) {
      ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
    }
    ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
    PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&B_ierr));
    ierr = PetscFPTrapPop();CHKERRQ(ierr);
    if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
  } else { /* either full or lower-triangular (not packed) */
    char ord[2];
    if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
      sprintf(ord,"L");
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      sprintf(ord,"U");
    }
    if (mumps->schur_sym == 2) {
      /* first call with lwork = -1 is the standard LAPACK workspace-size query */
      if (!mumps->schur_pivots) {
        PetscScalar lwork;

        ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
        mumps->schur_B_lwork=-1;
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to SYTRF Lapack routine %d",(int)B_ierr);
        ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
        ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRF Lapack routine %d",(int)B_ierr);
    } else {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
    }
  }
  mumps->schur_factored = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchur_Private"
/*
   Explicitly invert the Schur complement in place with GETRI/SYTRI/POTRI,
   matching the factorization kind chosen in MatMumpsFactorSchur_Private()
   (factoring first if necessary). Sets schur_inverted.

   NOTE(review): the inversion routines pass lda = B_N while the factorization
   used B_slda (= id.schur_lld); confirm schur_lld == size_schur whenever this
   path is reached.
*/
static PetscErrorCode MatMumpsInvertSchur_Private(Mat_MUMPS* mumps)
{
  PetscBLASInt   B_N,B_ierr,B_slda;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
    if (!mumps->schur_work) { /* workspace-size query for GETRI */
      PetscScalar lwork;

      mumps->schur_B_lwork = -1;
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to GETRI Lapack routine %d",(int)B_ierr);
      ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
      ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
    }
    ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
    PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
    ierr = PetscFPTrapPop();CHKERRQ(ierr);
    if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
  } else { /* either full or lower-triangular (not packed) */
    char ord[2];
    if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
      sprintf(ord,"L");
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      sprintf(ord,"U");
    }
    if (mumps->schur_sym == 2) {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKsytri",LAPACKsytri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRI Lapack routine %d",(int)B_ierr);
    } else {
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
    }
  }
  mumps->schur_inverted = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchur_Private"
/*
   Solve with the Schur complement: the B_Nrhs reduced right-hand sides live in
   id.redrhs (leading dimension id.lredrhs).

   If the Schur complement has been explicitly inverted, the "solve" is a dense
   multiply (GEMM for the unsymmetric case, SYMM for the symmetric one) into the
   scratch array schur_sol; otherwise LAPACK triangular solves
   (GETRS/SYTRS/POTRS) operate in place on id.redrhs. The transpose character
   for GETRS/GEMM is derived from ICNTL(9) (transpose solve) combined with the
   row/column storage order implied by ICNTL(19).

   sol_in_redrhs - PETSC_TRUE:  the solution must end up in id.redrhs
                   PETSC_FALSE: id.redrhs is left untouched; the solution is in
                                mumps->schur_sol (id.redrhs is temporarily
                                pointed at schur_sol and restored on exit)
*/
static PetscErrorCode MatMumpsSolveSchur_Private(Mat_MUMPS* mumps, PetscBool sol_in_redrhs)
{
  PetscBLASInt   B_N,B_Nrhs,B_ierr,B_slda,B_rlda;
  PetscScalar    one=1.,zero=0.;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.nrhs,&B_Nrhs);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.lredrhs,&B_rlda);CHKERRQ(ierr);
  if (mumps->schur_inverted) {
    PetscInt sizesol = B_Nrhs*B_N;
    if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) { /* grow scratch solution */
      ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
      ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
      mumps->schur_sizesol = sizesol;
    }
    if (!mumps->sym) {
      char type[2];
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      PetscStackCallBLAS("BLASgemm",BLASgemm_(type,"N",&B_N,&B_Nrhs,&B_N,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    } else {
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      PetscStackCallBLAS("BLASsymm",BLASsymm_("L",ord,&B_N,&B_Nrhs,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    }
    if (sol_in_redrhs) { /* caller wants the result back in id.redrhs */
      ierr = PetscMemcpy(mumps->id.redrhs,mumps->schur_sol,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* Schur complement has not yet been inverted */
    MumpsScalar *orhs=NULL;

    if (!sol_in_redrhs) {
      /* LAPACK solves overwrite the rhs: copy it out and temporarily redirect
         id.redrhs to the scratch array so the original rhs survives */
      PetscInt sizesol = B_Nrhs*B_N;
      if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
        ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
        ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
        mumps->schur_sizesol = sizesol;
      }
      orhs = mumps->id.redrhs;
      ierr = PetscMemcpy(mumps->schur_sol,mumps->id.redrhs,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
      mumps->id.redrhs = (MumpsScalar*)mumps->schur_sol;
    }
    if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
      char type[2];
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_(type,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRS Lapack routine %d",(int)B_ierr);
    } else { /* either full or lower-triangular (not packed) */
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      if (mumps->schur_sym == 2) {
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrs",LAPACKsytrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRS Lapack routine %d",(int)B_ierr);
      } else {
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKpotrs",LAPACKpotrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRS Lapack routine %d",(int)B_ierr);
      }
    }
    if (!sol_in_redrhs) { /* restore the user's rhs pointer */
      mumps->id.redrhs = orhs;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMumpsHandleSchur_Private"
/*
   Drive the MUMPS reduced-right-hand-side machinery around a solve.

   expansion == PETSC_FALSE: prepare the condensation step -- (re)allocate
   id.redrhs if needed and set ICNTL(26)=1 so the next MUMPS solve produces the
   reduced right-hand side.

   expansion == PETSC_TRUE: solve the Schur system ourselves (MUMPS leaves that
   to the user), run the MUMPS expansion step (ICNTL(26)=2), then restore
   ICNTL(26)=-1. No-op when no Schur complement was requested (ICNTL(19)==0).
*/
static PetscErrorCode MatMumpsHandleSchur_Private(Mat_MUMPS* mumps, PetscBool expansion)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */
    PetscFunctionReturn(0);
  }
  if (!expansion) { /* prepare for the condensation step */
    PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
    /* allocate MUMPS internal array to store reduced right-hand sides */
    if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
      ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
      mumps->id.lredrhs = mumps->id.size_schur;
      ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr);
      mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
    }
    mumps->id.ICNTL(26) = 1; /* condensation phase */
  } else { /* prepare for the expansion step */
    /* solve Schur complement (this has to be done by the MUMPS user, so basically us) */
    ierr = MatMumpsSolveSchur_Private(mumps,PETSC_TRUE);CHKERRQ(ierr);
    mumps->id.ICNTL(26) = 2; /* expansion phase */
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    /* restore defaults */
    mumps->id.ICNTL(26) = -1;
    /* free MUMPS internal array for redrhs if we have solved for multiple rhs in order to save memory space */
    if (mumps->id.nrhs > 1) {
      ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
      mumps->id.lredrhs = 0;
      mumps->sizeredrhs = 0;
    }
  }
  PetscFunctionReturn(0);
}

/*
  MatConvertToTriples_A_B - convert Petsc matrix to triples:
row[nz], col[nz], val[nz] 376 377 input: 378 A - matrix in aij,baij or sbaij (bs=1) format 379 shift - 0: C style output triple; 1: Fortran style output triple. 380 reuse - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple 381 MAT_REUSE_MATRIX: only the values in v array are updated 382 output: 383 nnz - dim of r, c, and v (number of local nonzero entries of A) 384 r, c, v - row and col index, matrix values (matrix triples) 385 386 The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is 387 freed with PetscFree((mumps->irn); This is not ideal code, the fact that v is ONLY sometimes part of mumps->irn means 388 that the PetscMalloc() cannot easily be replaced with a PetscMalloc3(). 389 390 */ 391 392 #undef __FUNCT__ 393 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij" 394 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 395 { 396 const PetscInt *ai,*aj,*ajj,M=A->rmap->n; 397 PetscInt nz,rnz,i,j; 398 PetscErrorCode ierr; 399 PetscInt *row,*col; 400 Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data; 401 402 PetscFunctionBegin; 403 *v=aa->a; 404 if (reuse == MAT_INITIAL_MATRIX) { 405 nz = aa->nz; 406 ai = aa->i; 407 aj = aa->j; 408 *nnz = nz; 409 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 410 col = row + nz; 411 412 nz = 0; 413 for (i=0; i<M; i++) { 414 rnz = ai[i+1] - ai[i]; 415 ajj = aj + ai[i]; 416 for (j=0; j<rnz; j++) { 417 row[nz] = i+shift; col[nz++] = ajj[j] + shift; 418 } 419 } 420 *r = row; *c = col; 421 } 422 PetscFunctionReturn(0); 423 } 424 425 #undef __FUNCT__ 426 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij" 427 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 428 { 429 Mat_SeqBAIJ *aa=(Mat_SeqBAIJ*)A->data; 430 const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2; 431 PetscInt bs,M,nz,idx=0,rnz,i,j,k,m; 432 PetscErrorCode ierr; 
433 PetscInt *row,*col; 434 435 PetscFunctionBegin; 436 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 437 M = A->rmap->N/bs; 438 *v = aa->a; 439 if (reuse == MAT_INITIAL_MATRIX) { 440 ai = aa->i; aj = aa->j; 441 nz = bs2*aa->nz; 442 *nnz = nz; 443 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 444 col = row + nz; 445 446 for (i=0; i<M; i++) { 447 ajj = aj + ai[i]; 448 rnz = ai[i+1] - ai[i]; 449 for (k=0; k<rnz; k++) { 450 for (j=0; j<bs; j++) { 451 for (m=0; m<bs; m++) { 452 row[idx] = i*bs + m + shift; 453 col[idx++] = bs*(ajj[k]) + j + shift; 454 } 455 } 456 } 457 } 458 *r = row; *c = col; 459 } 460 PetscFunctionReturn(0); 461 } 462 463 #undef __FUNCT__ 464 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij" 465 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 466 { 467 const PetscInt *ai, *aj,*ajj,M=A->rmap->n; 468 PetscInt nz,rnz,i,j; 469 PetscErrorCode ierr; 470 PetscInt *row,*col; 471 Mat_SeqSBAIJ *aa=(Mat_SeqSBAIJ*)A->data; 472 473 PetscFunctionBegin; 474 *v = aa->a; 475 if (reuse == MAT_INITIAL_MATRIX) { 476 nz = aa->nz; 477 ai = aa->i; 478 aj = aa->j; 479 *v = aa->a; 480 *nnz = nz; 481 ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr); 482 col = row + nz; 483 484 nz = 0; 485 for (i=0; i<M; i++) { 486 rnz = ai[i+1] - ai[i]; 487 ajj = aj + ai[i]; 488 for (j=0; j<rnz; j++) { 489 row[nz] = i+shift; col[nz++] = ajj[j] + shift; 490 } 491 } 492 *r = row; *c = col; 493 } 494 PetscFunctionReturn(0); 495 } 496 497 #undef __FUNCT__ 498 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij" 499 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 500 { 501 const PetscInt *ai,*aj,*ajj,*adiag,M=A->rmap->n; 502 PetscInt nz,rnz,i,j; 503 const PetscScalar *av,*v1; 504 PetscScalar *val; 505 PetscErrorCode ierr; 506 PetscInt *row,*col; 507 Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data; 508 PetscBool missing; 509 510 
PetscFunctionBegin; 511 ai =aa->i; aj=aa->j;av=aa->a; 512 adiag=aa->diag; 513 ierr = MatMissingDiagonal_SeqAIJ(A,&missing,&i);CHKERRQ(ierr); 514 if (reuse == MAT_INITIAL_MATRIX) { 515 /* count nz in the uppper triangular part of A */ 516 nz = 0; 517 if (missing) { 518 for (i=0; i<M; i++) { 519 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 520 for (j=ai[i];j<ai[i+1];j++) { 521 if (aj[j] < i) continue; 522 nz++; 523 } 524 } else { 525 nz += ai[i+1] - adiag[i]; 526 } 527 } 528 } else { 529 for (i=0; i<M; i++) nz += ai[i+1] - adiag[i]; 530 } 531 *nnz = nz; 532 533 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 534 col = row + nz; 535 val = (PetscScalar*)(col + nz); 536 537 nz = 0; 538 if (missing) { 539 for (i=0; i<M; i++) { 540 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 541 for (j=ai[i];j<ai[i+1];j++) { 542 if (aj[j] < i) continue; 543 row[nz] = i+shift; 544 col[nz] = aj[j]+shift; 545 val[nz] = av[j]; 546 nz++; 547 } 548 } else { 549 rnz = ai[i+1] - adiag[i]; 550 ajj = aj + adiag[i]; 551 v1 = av + adiag[i]; 552 for (j=0; j<rnz; j++) { 553 row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j]; 554 } 555 } 556 } 557 } else { 558 for (i=0; i<M; i++) { 559 rnz = ai[i+1] - adiag[i]; 560 ajj = aj + adiag[i]; 561 v1 = av + adiag[i]; 562 for (j=0; j<rnz; j++) { 563 row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j]; 564 } 565 } 566 } 567 *r = row; *c = col; *v = val; 568 } else { 569 nz = 0; val = *v; 570 if (missing) { 571 for (i=0; i <M; i++) { 572 if (PetscUnlikely(adiag[i] >= ai[i+1])) { 573 for (j=ai[i];j<ai[i+1];j++) { 574 if (aj[j] < i) continue; 575 val[nz++] = av[j]; 576 } 577 } else { 578 rnz = ai[i+1] - adiag[i]; 579 v1 = av + adiag[i]; 580 for (j=0; j<rnz; j++) { 581 val[nz++] = v1[j]; 582 } 583 } 584 } 585 } else { 586 for (i=0; i <M; i++) { 587 rnz = ai[i+1] - adiag[i]; 588 v1 = av + adiag[i]; 589 for (j=0; j<rnz; j++) { 590 val[nz++] = v1[j]; 591 } 592 } 593 } 594 } 595 PetscFunctionReturn(0); 596 } 597 
598 #undef __FUNCT__ 599 #define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij" 600 PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 601 { 602 const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 603 PetscErrorCode ierr; 604 PetscInt rstart,nz,i,j,jj,irow,countA,countB; 605 PetscInt *row,*col; 606 const PetscScalar *av, *bv,*v1,*v2; 607 PetscScalar *val; 608 Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)A->data; 609 Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)(mat->A)->data; 610 Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data; 611 612 PetscFunctionBegin; 613 ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart; 614 av=aa->a; bv=bb->a; 615 616 garray = mat->garray; 617 618 if (reuse == MAT_INITIAL_MATRIX) { 619 nz = aa->nz + bb->nz; 620 *nnz = nz; 621 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 622 col = row + nz; 623 val = (PetscScalar*)(col + nz); 624 625 *r = row; *c = col; *v = val; 626 } else { 627 row = *r; col = *c; val = *v; 628 } 629 630 jj = 0; irow = rstart; 631 for (i=0; i<m; i++) { 632 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 633 countA = ai[i+1] - ai[i]; 634 countB = bi[i+1] - bi[i]; 635 bjj = bj + bi[i]; 636 v1 = av + ai[i]; 637 v2 = bv + bi[i]; 638 639 /* A-part */ 640 for (j=0; j<countA; j++) { 641 if (reuse == MAT_INITIAL_MATRIX) { 642 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 643 } 644 val[jj++] = v1[j]; 645 } 646 647 /* B-part */ 648 for (j=0; j < countB; j++) { 649 if (reuse == MAT_INITIAL_MATRIX) { 650 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 651 } 652 val[jj++] = v2[j]; 653 } 654 irow++; 655 } 656 PetscFunctionReturn(0); 657 } 658 659 #undef __FUNCT__ 660 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij" 661 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 662 { 663 const PetscInt *ai, *aj, 
*bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 664 PetscErrorCode ierr; 665 PetscInt rstart,nz,i,j,jj,irow,countA,countB; 666 PetscInt *row,*col; 667 const PetscScalar *av, *bv,*v1,*v2; 668 PetscScalar *val; 669 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 670 Mat_SeqAIJ *aa = (Mat_SeqAIJ*)(mat->A)->data; 671 Mat_SeqAIJ *bb = (Mat_SeqAIJ*)(mat->B)->data; 672 673 PetscFunctionBegin; 674 ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart; 675 av=aa->a; bv=bb->a; 676 677 garray = mat->garray; 678 679 if (reuse == MAT_INITIAL_MATRIX) { 680 nz = aa->nz + bb->nz; 681 *nnz = nz; 682 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 683 col = row + nz; 684 val = (PetscScalar*)(col + nz); 685 686 *r = row; *c = col; *v = val; 687 } else { 688 row = *r; col = *c; val = *v; 689 } 690 691 jj = 0; irow = rstart; 692 for (i=0; i<m; i++) { 693 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 694 countA = ai[i+1] - ai[i]; 695 countB = bi[i+1] - bi[i]; 696 bjj = bj + bi[i]; 697 v1 = av + ai[i]; 698 v2 = bv + bi[i]; 699 700 /* A-part */ 701 for (j=0; j<countA; j++) { 702 if (reuse == MAT_INITIAL_MATRIX) { 703 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 704 } 705 val[jj++] = v1[j]; 706 } 707 708 /* B-part */ 709 for (j=0; j < countB; j++) { 710 if (reuse == MAT_INITIAL_MATRIX) { 711 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 712 } 713 val[jj++] = v2[j]; 714 } 715 irow++; 716 } 717 PetscFunctionReturn(0); 718 } 719 720 #undef __FUNCT__ 721 #define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij" 722 PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 723 { 724 Mat_MPIBAIJ *mat = (Mat_MPIBAIJ*)A->data; 725 Mat_SeqBAIJ *aa = (Mat_SeqBAIJ*)(mat->A)->data; 726 Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data; 727 const PetscInt *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj; 728 const PetscInt *garray = 
mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart; 729 const PetscInt bs2=mat->bs2; 730 PetscErrorCode ierr; 731 PetscInt bs,nz,i,j,k,n,jj,irow,countA,countB,idx; 732 PetscInt *row,*col; 733 const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2; 734 PetscScalar *val; 735 736 PetscFunctionBegin; 737 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 738 if (reuse == MAT_INITIAL_MATRIX) { 739 nz = bs2*(aa->nz + bb->nz); 740 *nnz = nz; 741 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 742 col = row + nz; 743 val = (PetscScalar*)(col + nz); 744 745 *r = row; *c = col; *v = val; 746 } else { 747 row = *r; col = *c; val = *v; 748 } 749 750 jj = 0; irow = rstart; 751 for (i=0; i<mbs; i++) { 752 countA = ai[i+1] - ai[i]; 753 countB = bi[i+1] - bi[i]; 754 ajj = aj + ai[i]; 755 bjj = bj + bi[i]; 756 v1 = av + bs2*ai[i]; 757 v2 = bv + bs2*bi[i]; 758 759 idx = 0; 760 /* A-part */ 761 for (k=0; k<countA; k++) { 762 for (j=0; j<bs; j++) { 763 for (n=0; n<bs; n++) { 764 if (reuse == MAT_INITIAL_MATRIX) { 765 row[jj] = irow + n + shift; 766 col[jj] = rstart + bs*ajj[k] + j + shift; 767 } 768 val[jj++] = v1[idx++]; 769 } 770 } 771 } 772 773 idx = 0; 774 /* B-part */ 775 for (k=0; k<countB; k++) { 776 for (j=0; j<bs; j++) { 777 for (n=0; n<bs; n++) { 778 if (reuse == MAT_INITIAL_MATRIX) { 779 row[jj] = irow + n + shift; 780 col[jj] = bs*garray[bjj[k]] + j + shift; 781 } 782 val[jj++] = v2[idx++]; 783 } 784 } 785 } 786 irow += bs; 787 } 788 PetscFunctionReturn(0); 789 } 790 791 #undef __FUNCT__ 792 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij" 793 PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v) 794 { 795 const PetscInt *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj; 796 PetscErrorCode ierr; 797 PetscInt rstart,nz,nza,nzb,i,j,jj,irow,countA,countB; 798 PetscInt *row,*col; 799 const PetscScalar *av, *bv,*v1,*v2; 800 PetscScalar *val; 801 Mat_MPIAIJ *mat = 
(Mat_MPIAIJ*)A->data; 802 Mat_SeqAIJ *aa =(Mat_SeqAIJ*)(mat->A)->data; 803 Mat_SeqAIJ *bb =(Mat_SeqAIJ*)(mat->B)->data; 804 805 PetscFunctionBegin; 806 ai=aa->i; aj=aa->j; adiag=aa->diag; 807 bi=bb->i; bj=bb->j; garray = mat->garray; 808 av=aa->a; bv=bb->a; 809 810 rstart = A->rmap->rstart; 811 812 if (reuse == MAT_INITIAL_MATRIX) { 813 nza = 0; /* num of upper triangular entries in mat->A, including diagonals */ 814 nzb = 0; /* num of upper triangular entries in mat->B */ 815 for (i=0; i<m; i++) { 816 nza += (ai[i+1] - adiag[i]); 817 countB = bi[i+1] - bi[i]; 818 bjj = bj + bi[i]; 819 for (j=0; j<countB; j++) { 820 if (garray[bjj[j]] > rstart) nzb++; 821 } 822 } 823 824 nz = nza + nzb; /* total nz of upper triangular part of mat */ 825 *nnz = nz; 826 ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr); 827 col = row + nz; 828 val = (PetscScalar*)(col + nz); 829 830 *r = row; *c = col; *v = val; 831 } else { 832 row = *r; col = *c; val = *v; 833 } 834 835 jj = 0; irow = rstart; 836 for (i=0; i<m; i++) { 837 ajj = aj + adiag[i]; /* ptr to the beginning of the diagonal of this row */ 838 v1 = av + adiag[i]; 839 countA = ai[i+1] - adiag[i]; 840 countB = bi[i+1] - bi[i]; 841 bjj = bj + bi[i]; 842 v2 = bv + bi[i]; 843 844 /* A-part */ 845 for (j=0; j<countA; j++) { 846 if (reuse == MAT_INITIAL_MATRIX) { 847 row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift; 848 } 849 val[jj++] = v1[j]; 850 } 851 852 /* B-part */ 853 for (j=0; j < countB; j++) { 854 if (garray[bjj[j]] > rstart) { 855 if (reuse == MAT_INITIAL_MATRIX) { 856 row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift; 857 } 858 val[jj++] = v2[j]; 859 } 860 } 861 irow++; 862 } 863 PetscFunctionReturn(0); 864 } 865 866 #undef __FUNCT__ 867 #define __FUNCT__ "MatDestroy_MUMPS" 868 PetscErrorCode MatDestroy_MUMPS(Mat A) 869 { 870 Mat_MUMPS *mumps=(Mat_MUMPS*)A->data; 871 PetscErrorCode ierr; 872 873 PetscFunctionBegin; 874 ierr = 
PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr); 875 ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr); 876 ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr); 877 ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr); 878 ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr); 879 ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr); 880 ierr = PetscFree(mumps->irn);CHKERRQ(ierr); 881 ierr = PetscFree(mumps->info);CHKERRQ(ierr); 882 ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr); 883 mumps->id.job = JOB_END; 884 PetscMUMPS_c(&mumps->id); 885 ierr = MPI_Comm_free(&mumps->comm_mumps);CHKERRQ(ierr); 886 ierr = PetscFree(A->data);CHKERRQ(ierr); 887 888 /* clear composed functions */ 889 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr); 890 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);CHKERRQ(ierr); 891 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorInvertSchurComplement_C",NULL);CHKERRQ(ierr); 892 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorCreateSchurComplement_C",NULL);CHKERRQ(ierr); 893 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSchurComplement_C",NULL);CHKERRQ(ierr); 894 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSolveSchurComplement_C",NULL);CHKERRQ(ierr); 895 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSolveSchurComplementTranspose_C",NULL);CHKERRQ(ierr); 896 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorFactorizeSchurComplement_C",NULL);CHKERRQ(ierr); 897 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurComplementSolverType_C",NULL);CHKERRQ(ierr); 898 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr); 899 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr); 900 ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr); 901 ierr = 
PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);
ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);
PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_MUMPS"
/*
   MatSolve_MUMPS - solves A x = b using the MUMPS factors stored in A->data.

   b is scattered to a sequential vector on rank 0 (MUMPS only accepts a
   centralized rhs), the MUMPS solve phase is run, and the distributed
   solution returned by MUMPS (sol_loc/isol_loc) is scattered back into the
   PETSc layout of x.  When Schur indices were provided by the user and
   ICNTL(26) is not a valid value, the full system is solved via the extra
   condensation/expansion calls to MatMumpsHandleSchur_Private().
*/
PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS        *mumps=(Mat_MUMPS*)A->data;
  PetscScalar      *array;
  Vec              b_seq;
  IS               is_iden,is_petsc;
  PetscErrorCode   ierr;
  PetscInt         i;
  PetscBool        second_solve = PETSC_FALSE;
  static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{MUMPS01,\n author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n journal = {SIAM Journal on Matrix Analysis and Applications},\n volume = {23},\n number = {1},\n pages = {15--41},\n year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
  ierr = PetscCitationsRegister("@article{MUMPS02,\n author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n title = {Hybrid scheduling for the parallel solution of linear systems},\n journal = {Parallel Computing},\n volume = {32},\n number = {2},\n pages = {136--156},\n year = {2006}\n}\n",&cite2);CHKERRQ(ierr);

  /* a failed factorization cannot produce a solution; flag x and bail out */
  if (A->factorerrortype) {
    ierr = PetscInfo2(A,"MatSolve is called with singular matrix factor, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
    ierr = VecSetInf(x);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  mumps->id.nrhs = 1;
  b_seq          = mumps->b_seq;
  if (mumps->size > 1) {
    /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
    ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
  } else {  /* size == 1: MUMPS overwrites the rhs with the solution, so solve in place in x */
    ierr = VecCopy(b,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
  }
  if (!mumps->myid) { /* define rhs on the host */
    mumps->id.nrhs = 1;
    mumps->id.rhs = (MumpsScalar*)array;
  }

  /*
     handle condensation step of Schur complement (if any)
     We set by default ICNTL(26) == -1 when Schur indices have been provided by the user.
     According to MUMPS (5.0.0) manual, any value should be harmful during the factorization phase
     Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system.
     This requires an extra call to PetscMUMPS_c and the computation of the factors for S
  */
  if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
    second_solve = PETSC_TRUE;
    ierr = MatMumpsHandleSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* solve phase */
  /*-------------*/
  mumps->id.job = JOB_SOLVE;
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  /* handle expansion step of Schur complement (if any) */
  if (second_solve) {
    ierr = MatMumpsHandleSchur_Private(mumps,PETSC_TRUE);CHKERRQ(ierr);
  }

  if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
    if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
      /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    }
    if (!mumps->scat_sol) { /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<mumps->id.lsol_loc; i++) {
        mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
      ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);

      mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
    }

    ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolveTranspose_MUMPS"
/*
   MatSolveTranspose_MUMPS - solves A^T x = b by toggling MUMPS ICNTL(9)
   (0: solve with A^T, 1: solve with A) around a regular MatSolve_MUMPS().
*/
PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0;
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1; /* restore MUMPS default: solve with A */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_MUMPS"
/*
   MatMatSolve_MUMPS - solves A X = B for multiple right-hand sides, B and X dense.

   Sequential case: B is copied into X and MUMPS solves in place.
   Parallel case: B is gathered to the host column-by-column (MUMPS requires a
   centralized rhs), and the distributed solution is scattered back into X
   using the idx/iidx/idxx index maps built below.
*/
PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->data;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M; /* leading dimension of the (column-major) dense rhs */

  if (mumps->size == 1) {
    PetscBool second_solve = PETSC_FALSE;
    /* copy B to X; MUMPS overwrites the rhs array with the solution */
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array;

    /* handle condensation step of Schur complement (if any) */
    if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
      second_solve = PETSC_TRUE;
      ierr = MatMumpsHandleSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
    }
    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* handle expansion step of Schur complement (if any) */
    if (second_solve) {
      ierr = MatMumpsHandleSchur_Private(mumps,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
  } else {  /*--------- parallel case --------*/
    PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
    MumpsScalar    *sol_loc,*sol_loc_save;
    IS             is_to,is_from;
    PetscInt       k,proc,j,m;
    const PetscInt *rstart;
    Vec            v_mpi,b_seq,x_seq;
    VecScatter     scat_rhs,scat_sol;

    /* create x_seq to hold local solution; the single-rhs buffers installed
       by MatFactorNumeric_MUMPS are saved and restored at the end */
    isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
    sol_loc_save  = mumps->id.sol_loc;

    lsol_loc  = mumps->id.INFO(23);
    nlsol_loc = nrhs*lsol_loc; /* length of sol_loc */
    ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr);
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    mumps->id.isol_loc = isol_loc;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr);

    /* copy rhs matrix B into vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
    /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
       iidx: inverse of idx, will be used by scattering xx_seq -> X */
    ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
    k = 0;
    for (proc=0; proc<mumps->size; proc++){
      for (j=0; j<nrhs; j++){
        for (i=rstart[proc]; i<rstart[proc+1]; i++){
          iidx[j*M + i] = k;
          idx[k++]      = j*M + i;
        }
      }
    }

    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      /* non-host ranks contribute nothing to the centralized rhs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

    /* create scatter scat_sol */
    ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
    for (i=0; i<lsol_loc; i++) {
      isol_loc[i] -= 1; /* change Fortran style to C style */
      idxx[i] = iidx[isol_loc[i]];
      for (j=1; j<nrhs; j++){
        idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M];
      }
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
    ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

    /* free spaces */
    mumps->id.sol_loc = sol_loc_save;
    mumps->id.isol_loc = isol_loc_save;

    ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
    ierr = PetscFree2(idx,iidx);CHKERRQ(ierr);
    ierr = PetscFree(idxx);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#if !defined(PETSC_USE_COMPLEX)
/*
  input:
   F:     numeric factor
  output:
   nneg:  total number of negative pivots
   nzero: total number of zero pivots
   npos:  (global dimension of F) - nneg - nzero
*/
#undef __FUNCT__
#define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->data;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
  /* MUMPS 4.3.1 calls
ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */ 1181 if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13)); 1182 1183 if (nneg) *nneg = mumps->id.INFOG(12); 1184 if (nzero || npos) { 1185 if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection"); 1186 if (nzero) *nzero = mumps->id.INFOG(28); 1187 if (npos) *npos = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28)); 1188 } 1189 PetscFunctionReturn(0); 1190 } 1191 #endif 1192 1193 #undef __FUNCT__ 1194 #define __FUNCT__ "MatFactorNumeric_MUMPS" 1195 PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info) 1196 { 1197 Mat_MUMPS *mumps =(Mat_MUMPS*)(F)->data; 1198 PetscErrorCode ierr; 1199 PetscBool isMPIAIJ; 1200 1201 PetscFunctionBegin; 1202 if (mumps->id.INFOG(1) < 0) { 1203 if (mumps->id.INFOG(1) == -6) { 1204 ierr = PetscInfo2(A,"MatFactorNumeric is called with singular matrix structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1205 } 1206 ierr = PetscInfo2(A,"MatFactorNumeric is called after analysis phase fails, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1207 PetscFunctionReturn(0); 1208 } 1209 1210 ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr); 1211 1212 /* numerical factorization phase */ 1213 /*-------------------------------*/ 1214 mumps->id.job = JOB_FACTNUMERIC; 1215 if (!mumps->id.ICNTL(18)) { /* A is centralized */ 1216 if (!mumps->myid) { 1217 mumps->id.a = (MumpsScalar*)mumps->val; 1218 } 1219 } else { 1220 mumps->id.a_loc = (MumpsScalar*)mumps->val; 1221 } 1222 PetscMUMPS_c(&mumps->id); 1223 if 
(mumps->id.INFOG(1) < 0) { 1224 if (A->erroriffailure) { 1225 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2)); 1226 } else { 1227 if (mumps->id.INFOG(1) == -10) { /* numerically singular matrix */ 1228 ierr = PetscInfo2(F,"matrix is numerically singular, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1229 F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1230 } else if (mumps->id.INFOG(1) == -13) { 1231 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, cannot allocate required memory %d megabytes\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1232 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1233 } else if (mumps->id.INFOG(1) == -8 || mumps->id.INFOG(1) == -9 || (-16 < mumps->id.INFOG(1) && mumps->id.INFOG(1) < -10) ) { 1234 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d, problem with workarray \n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1235 F->factorerrortype = MAT_FACTOR_OUTMEMORY; 1236 } else { 1237 ierr = PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr); 1238 F->factorerrortype = MAT_FACTOR_OTHER; 1239 } 1240 } 1241 } 1242 if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB," mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16)); 1243 1244 (F)->assembled = PETSC_TRUE; 1245 mumps->matstruc = SAME_NONZERO_PATTERN; 1246 mumps->schur_factored = PETSC_FALSE; 1247 mumps->schur_inverted = PETSC_FALSE; 1248 1249 /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */ 1250 if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3; 1251 1252 if (mumps->size > 1) { 1253 PetscInt lsol_loc; 1254 PetscScalar *sol_loc; 1255 1256 ierr = 
PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      /* refactorization: drop the buffers from the previous factorization first */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Sets MUMPS options from the options database */
#undef __FUNCT__
#define __FUNCT__ "PetscSetMUMPSFromOptions"
PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
  PetscErrorCode ierr;
  PetscInt       icntl,info[40],i,ninfo=40;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(1) = icntl;
  ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(2) = icntl;
  ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(3) = icntl;

  ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(4) = icntl;
  if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */

  ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
  if (flg) mumps->id.ICNTL(6) = icntl;

  ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
  if (flg) {
    /* ICNTL(7)=1 means the ordering is supplied in PERM_IN, which this interface only supports sequentially */
    if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
    else mumps->id.ICNTL(7) = icntl;
  }

  ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
  ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
  if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any schur data (if any) */
    ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
  }
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr);  -- sparse rhs is not supported in PETSc API */
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */

  ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
  if (mumps->id.ICNTL(24)) {
    mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
  }

  ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
  /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
  ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);

  ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static
pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr); 1340 ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr); 1341 1342 ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);CHKERRQ(ierr); 1343 1344 ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr); 1345 if (ninfo) { 1346 if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must <= 40\n",ninfo); 1347 ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr); 1348 mumps->ninfo = ninfo; 1349 for (i=0; i<ninfo; i++) { 1350 if (info[i] < 0 || info[i]>40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must between 1 and 40\n",ninfo); 1351 else mumps->info[i] = info[i]; 1352 } 1353 } 1354 1355 ierr = PetscOptionsEnd();CHKERRQ(ierr); 1356 PetscFunctionReturn(0); 1357 } 1358 1359 #undef __FUNCT__ 1360 #define __FUNCT__ "PetscInitializeMUMPS" 1361 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps) 1362 { 1363 PetscErrorCode ierr; 1364 1365 PetscFunctionBegin; 1366 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);CHKERRQ(ierr); 1367 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr); 1368 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr); 1369 1370 mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps); 1371 1372 mumps->id.job = JOB_INIT; 1373 mumps->id.par = 1; /* host participates factorizaton and solve */ 1374 mumps->id.sym = mumps->sym; 1375 PetscMUMPS_c(&mumps->id); 1376 1377 mumps->scat_rhs = NULL; 1378 mumps->scat_sol = NULL; 1379 1380 /* set PETSc-MUMPS default options - override MUMPS default */ 1381 mumps->id.ICNTL(3) = 0; 1382 mumps->id.ICNTL(4) = 0; 1383 if (mumps->size == 1) { 1384 mumps->id.ICNTL(18) = 0; /* 
centralized assembled matrix input */
  } else {
    mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
    mumps->id.ICNTL(20) = 0;   /* rhs is in dense format */
    mumps->id.ICNTL(21) = 1;   /* distributed solution */
  }

  /* schur: start with no Schur complement requested */
  mumps->id.size_schur    = 0;
  mumps->id.listvar_schur = NULL;
  mumps->id.schur         = NULL;
  mumps->sizeredrhs       = 0;
  mumps->schur_pivots     = NULL;
  mumps->schur_work       = NULL;
  mumps->schur_sol        = NULL;
  mumps->schur_sizesol    = 0;
  mumps->schur_factored   = PETSC_FALSE;
  mumps->schur_inverted   = PETSC_FALSE;
  mumps->schur_sym        = mumps->id.sym;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatFactorSymbolic_MUMPS_ReportIfError"
/*
   MatFactorSymbolic_MUMPS_ReportIfError - inspects INFOG(1) after the MUMPS
   analysis phase.  If A->erroriffailure is set, a negative INFOG(1) raises a
   PETSc error; otherwise the failure is logged and translated into
   F->factorerrortype so the caller can recover.
*/
PetscErrorCode MatFactorSymbolic_MUMPS_ReportIfError(Mat F,Mat A,const MatFactorInfo *info,Mat_MUMPS *mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->id.INFOG(1) < 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    } else {
      if (mumps->id.INFOG(1) == -6) {
        ierr = PetscInfo2(F,"matrix is singular in structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_STRUCT_ZEROPIVOT;
      } else if (mumps->id.INFOG(1) == -5 || mumps->id.INFOG(1) == -7) {
        ierr = PetscInfo2(F,"problem of workspace, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
      } else {
        ierr = PetscInfo2(F,"Error reported by MUMPS in analysis phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));CHKERRQ(ierr);
        F->factorerrortype = MAT_FACTOR_OTHER;
      }
    }
  }
  PetscFunctionReturn(0);
}

/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled
matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
   MatLUFactorSymbolic_AIJMUMPS - runs the MUMPS analysis phase (JOB=1) for an
   LU factorization of an AIJ matrix.  Builds the triple (irn,jcn,val)
   representation of A, hands the structure to MUMPS in centralized
   (ICNTL(18)=0) or distributed (ICNTL(18)=3) form, and installs the
   numeric/solve function pointers on the factor matrix F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* scaling/permutation options > 1 need the numerical values already at analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
        */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr);

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = MatMatSolve_MUMPS;
  PetscFunctionReturn(0);
}

/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
   MatLUFactorSymbolic_BAIJMUMPS - analysis phase for LU of a BAIJ matrix.
   Same structure as the AIJ variant, except no user ordering (r,c ignored).
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr =
(*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* scaling/permutation options > 1 need the numerical values already at analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr);

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}

/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   MatCholeskyFactorSymbolic_MUMPS - analysis phase for a symmetric (Cholesky)
   factorization.  Same protocol as the LU variants; since A is symmetric,
   solvetranspose is the plain MatSolve_MUMPS and inertia is available in the
   real case.
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->data;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  ierr = MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);CHKERRQ(ierr);

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS; /* A = A^T, so the plain solve suffices */
  F->ops->matsolve              = MatMatSolve_MUMPS;
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL;
#else
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MUMPS"
/* MatView_MUMPS - prints the MUMPS control/info parameters for -ksp_view style output */
PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;
  Mat_MUMPS         *mumps=(Mat_MUMPS*)A->data;

  PetscFunctionBegin;
  /* check if matrix is mumps type */
  if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",mumps->id.sym);CHKERRQ(ierr);
      ierr =
PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",mumps->id.par);CHKERRQ(ierr); 1661 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr); 1662 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr); 1663 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr); 1664 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr); 1665 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr); 1666 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr); 1667 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr); 1668 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(8) (scalling strategy): %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr); 1669 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr); 1670 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr); 1671 if (mumps->id.ICNTL(11)>0) { 1672 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(4) (inf norm of input mat): %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr); 1673 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(5) (inf norm of solution): %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr); 1674 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(6) (inf norm of residual): %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr); 1675 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr); 1676 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(9) (error estimate): %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr); 1677 ierr = PetscViewerASCIIPrintf(viewer," 
RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr); 1678 } 1679 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr); 1680 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr); 1681 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr); 1682 /* ICNTL(15-17) not used */ 1683 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr); 1684 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(19) (Shur complement info): %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr); 1685 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr); 1686 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr); 1687 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr); 1688 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr); 1689 1690 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr); 1691 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr); 1692 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr); 1693 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr); 1694 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(28) (use parallel or sequential ordering): %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr); 1695 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(29) 
(parallel ordering): %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr); 1696 1697 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(30) (user-specified set of entries in inv(A)): %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr); 1698 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(31) (factors is discarded in the solve phase): %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr); 1699 ierr = PetscViewerASCIIPrintf(viewer," ICNTL(33) (compute determinant): %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr); 1700 1701 ierr = PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",mumps->id.CNTL(1));CHKERRQ(ierr); 1702 ierr = PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr); 1703 ierr = PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",mumps->id.CNTL(3));CHKERRQ(ierr); 1704 ierr = PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",mumps->id.CNTL(4));CHKERRQ(ierr); 1705 ierr = PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g \n",mumps->id.CNTL(5));CHKERRQ(ierr); 1706 1707 /* infomation local to each processor */ 1708 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr); 1709 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 1710 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr); 1711 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1712 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr); 1713 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr); 1714 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1715 ierr = PetscViewerASCIIPrintf(viewer, " RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr); 1716 ierr = 
PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr); 1717 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1718 1719 ierr = PetscViewerASCIIPrintf(viewer, " INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr); 1720 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr); 1721 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1722 1723 ierr = PetscViewerASCIIPrintf(viewer, " INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr); 1724 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr); 1725 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1726 1727 ierr = PetscViewerASCIIPrintf(viewer, " INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr); 1728 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr); 1729 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1730 1731 if (mumps->ninfo && mumps->ninfo <= 40){ 1732 PetscInt i; 1733 for (i=0; i<mumps->ninfo; i++){ 1734 ierr = PetscViewerASCIIPrintf(viewer, " INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr); 1735 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr); 1736 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1737 } 1738 } 1739 1740 1741 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 1742 1743 if (!mumps->myid) { /* information from the host */ 1744 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr); 1745 ierr = PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr); 1746 ierr = 
PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr); 1747 ierr = PetscViewerASCIIPrintf(viewer," (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr); 1748 1749 ierr = PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr); 1750 ierr = PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr); 1751 ierr = PetscViewerASCIIPrintf(viewer," INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr); 1752 ierr = PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr); 1753 ierr = PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr); 1754 ierr = PetscViewerASCIIPrintf(viewer," INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr); 1755 ierr = PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr); 1756 ierr = PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr); 1757 ierr = PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr); 1758 ierr = PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr); 1759 ierr = PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after 
factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr); 1760 ierr = PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr); 1761 ierr = PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr); 1762 ierr = PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr); 1763 ierr = PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr); 1764 ierr = PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr); 1765 ierr = PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr); 1766 ierr = PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr); 1767 ierr = PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr); 1768 ierr = PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr); 1769 ierr = PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr); 1770 ierr = PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d 
\n",mumps->id.INFOG(24));CHKERRQ(ierr); 1771 ierr = PetscViewerASCIIPrintf(viewer," INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr); 1772 ierr = PetscViewerASCIIPrintf(viewer," INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr); 1773 ierr = PetscViewerASCIIPrintf(viewer," INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr); 1774 ierr = PetscViewerASCIIPrintf(viewer," INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr); 1775 ierr = PetscViewerASCIIPrintf(viewer," INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr); 1776 ierr = PetscViewerASCIIPrintf(viewer," INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr); 1777 ierr = PetscViewerASCIIPrintf(viewer," INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr); 1778 } 1779 } 1780 } 1781 PetscFunctionReturn(0); 1782 } 1783 1784 #undef __FUNCT__ 1785 #define __FUNCT__ "MatGetInfo_MUMPS" 1786 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info) 1787 { 1788 Mat_MUMPS *mumps =(Mat_MUMPS*)A->data; 1789 1790 PetscFunctionBegin; 1791 info->block_size = 1.0; 1792 info->nz_allocated = mumps->id.INFOG(20); 1793 info->nz_used = mumps->id.INFOG(20); 1794 info->nz_unneeded = 0.0; 1795 info->assemblies = 0.0; 1796 info->mallocs = 0.0; 1797 info->memory = 0.0; 1798 info->fill_ratio_given = 0; 1799 info->fill_ratio_needed = 0; 1800 info->factor_mallocs = 0; 1801 PetscFunctionReturn(0); 1802 } 1803 1804 /* -------------------------------------------------------------------------------------------*/ 1805 #undef __FUNCT__ 1806 #define __FUNCT__ "MatFactorSetSchurIS_MUMPS" 1807 
PetscErrorCode MatFactorSetSchurIS_MUMPS(Mat F, IS is) 1808 { 1809 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1810 const PetscInt *idxs; 1811 PetscInt size,i; 1812 PetscErrorCode ierr; 1813 1814 PetscFunctionBegin; 1815 if (mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS parallel Schur complements not yet supported from PETSc\n"); 1816 ierr = ISGetLocalSize(is,&size);CHKERRQ(ierr); 1817 if (mumps->id.size_schur != size) { 1818 ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr); 1819 mumps->id.size_schur = size; 1820 mumps->id.schur_lld = size; 1821 ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr); 1822 } 1823 ierr = ISGetIndices(is,&idxs);CHKERRQ(ierr); 1824 ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr); 1825 /* MUMPS expects Fortran style indices */ 1826 for (i=0;i<size;i++) mumps->id.listvar_schur[i]++; 1827 ierr = ISRestoreIndices(is,&idxs);CHKERRQ(ierr); 1828 if (size) { /* turn on Schur switch if we the set of indices is not empty */ 1829 if (F->factortype == MAT_FACTOR_LU) { 1830 mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */ 1831 } else { 1832 mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */ 1833 } 1834 /* set a special value of ICNTL (not handled my MUMPS) to be used in the solve phase by PETSc */ 1835 mumps->id.ICNTL(26) = -1; 1836 } 1837 PetscFunctionReturn(0); 1838 } 1839 1840 /* -------------------------------------------------------------------------------------------*/ 1841 #undef __FUNCT__ 1842 #define __FUNCT__ "MatFactorCreateSchurComplement_MUMPS" 1843 PetscErrorCode MatFactorCreateSchurComplement_MUMPS(Mat F,Mat* S) 1844 { 1845 Mat St; 1846 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1847 PetscScalar *array; 1848 #if defined(PETSC_USE_COMPLEX) 1849 PetscScalar im = PetscSqrtScalar((PetscScalar)-1.0); 1850 #endif 1851 PetscErrorCode ierr; 1852 1853 PetscFunctionBegin; 1854 if (!mumps->id.ICNTL(19)) 
SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it"); 1855 else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS before"); 1856 1857 ierr = MatCreate(PetscObjectComm((PetscObject)F),&St);CHKERRQ(ierr); 1858 ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr); 1859 ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr); 1860 ierr = MatSetUp(St);CHKERRQ(ierr); 1861 ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr); 1862 if (!mumps->sym) { /* MUMPS always return a full matrix */ 1863 if (mumps->id.ICNTL(19) == 1) { /* stored by rows */ 1864 PetscInt i,j,N=mumps->id.size_schur; 1865 for (i=0;i<N;i++) { 1866 for (j=0;j<N;j++) { 1867 #if !defined(PETSC_USE_COMPLEX) 1868 PetscScalar val = mumps->id.schur[i*N+j]; 1869 #else 1870 PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i; 1871 #endif 1872 array[j*N+i] = val; 1873 } 1874 } 1875 } else { /* stored by columns */ 1876 ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr); 1877 } 1878 } else { /* either full or lower-triangular (not packed) */ 1879 if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */ 1880 PetscInt i,j,N=mumps->id.size_schur; 1881 for (i=0;i<N;i++) { 1882 for (j=i;j<N;j++) { 1883 #if !defined(PETSC_USE_COMPLEX) 1884 PetscScalar val = mumps->id.schur[i*N+j]; 1885 #else 1886 PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i; 1887 #endif 1888 array[i*N+j] = val; 1889 array[j*N+i] = val; 1890 } 1891 } 1892 } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */ 1893 ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr); 1894 } else { /* ICNTL(19) == 1 lower triangular stored by rows */ 1895 
PetscInt i,j,N=mumps->id.size_schur; 1896 for (i=0;i<N;i++) { 1897 for (j=0;j<i+1;j++) { 1898 #if !defined(PETSC_USE_COMPLEX) 1899 PetscScalar val = mumps->id.schur[i*N+j]; 1900 #else 1901 PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i; 1902 #endif 1903 array[i*N+j] = val; 1904 array[j*N+i] = val; 1905 } 1906 } 1907 } 1908 } 1909 ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr); 1910 *S = St; 1911 PetscFunctionReturn(0); 1912 } 1913 1914 /* -------------------------------------------------------------------------------------------*/ 1915 #undef __FUNCT__ 1916 #define __FUNCT__ "MatFactorGetSchurComplement_MUMPS" 1917 PetscErrorCode MatFactorGetSchurComplement_MUMPS(Mat F,Mat* S) 1918 { 1919 Mat St; 1920 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1921 PetscErrorCode ierr; 1922 1923 PetscFunctionBegin; 1924 if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it"); 1925 else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! 
You should call MatFactorSetSchurIS before"); 1926 1927 /* It should be the responsibility of the user to handle different ICNTL(19) cases and factorization stages if they want to work with the raw data */ 1928 ierr = MatCreateSeqDense(PetscObjectComm((PetscObject)F),mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&St);CHKERRQ(ierr); 1929 *S = St; 1930 PetscFunctionReturn(0); 1931 } 1932 1933 /* -------------------------------------------------------------------------------------------*/ 1934 #undef __FUNCT__ 1935 #define __FUNCT__ "MatFactorFactorizeSchurComplement_MUMPS" 1936 PetscErrorCode MatFactorFactorizeSchurComplement_MUMPS(Mat F) 1937 { 1938 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1939 PetscErrorCode ierr; 1940 1941 PetscFunctionBegin; 1942 if (!mumps->id.ICNTL(19)) { /* do nothing */ 1943 PetscFunctionReturn(0); 1944 } 1945 if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before"); 1946 ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr); 1947 PetscFunctionReturn(0); 1948 } 1949 1950 #undef __FUNCT__ 1951 #define __FUNCT__ "MatFactorInvertSchurComplement_MUMPS" 1952 PetscErrorCode MatFactorInvertSchurComplement_MUMPS(Mat F) 1953 { 1954 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1955 PetscErrorCode ierr; 1956 1957 PetscFunctionBegin; 1958 if (!mumps->id.ICNTL(19)) { /* do nothing */ 1959 PetscFunctionReturn(0); 1960 } 1961 if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! 
You should call MatFactorSetSchurIS before"); 1962 ierr = MatMumpsInvertSchur_Private(mumps);CHKERRQ(ierr); 1963 PetscFunctionReturn(0); 1964 } 1965 1966 /* -------------------------------------------------------------------------------------------*/ 1967 #undef __FUNCT__ 1968 #define __FUNCT__ "MatFactorSolveSchurComplement_MUMPS" 1969 PetscErrorCode MatFactorSolveSchurComplement_MUMPS(Mat F, Vec rhs, Vec sol) 1970 { 1971 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 1972 MumpsScalar *orhs; 1973 PetscScalar *osol,*nrhs,*nsol; 1974 PetscInt orhs_size,osol_size,olrhs_size; 1975 PetscErrorCode ierr; 1976 1977 PetscFunctionBegin; 1978 if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it"); 1979 if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS before"); 1980 1981 /* swap pointers */ 1982 orhs = mumps->id.redrhs; 1983 olrhs_size = mumps->id.lredrhs; 1984 orhs_size = mumps->sizeredrhs; 1985 osol = mumps->schur_sol; 1986 osol_size = mumps->schur_sizesol; 1987 ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr); 1988 ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr); 1989 mumps->id.redrhs = (MumpsScalar*)nrhs; 1990 ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr); 1991 mumps->id.lredrhs = mumps->sizeredrhs; 1992 mumps->schur_sol = nsol; 1993 ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr); 1994 1995 /* solve Schur complement */ 1996 mumps->id.nrhs = 1; 1997 ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr); 1998 /* restore pointers */ 1999 ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr); 2000 ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr); 2001 mumps->id.redrhs = orhs; 2002 mumps->id.lredrhs = olrhs_size; 2003 mumps->sizeredrhs = orhs_size; 2004 mumps->schur_sol = osol; 2005 mumps->schur_sizesol = osol_size; 2006 PetscFunctionReturn(0); 2007 } 
2008 2009 /* -------------------------------------------------------------------------------------------*/ 2010 #undef __FUNCT__ 2011 #define __FUNCT__ "MatFactorSolveSchurComplementTranspose_MUMPS" 2012 PetscErrorCode MatFactorSolveSchurComplementTranspose_MUMPS(Mat F, Vec rhs, Vec sol) 2013 { 2014 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2015 MumpsScalar *orhs; 2016 PetscScalar *osol,*nrhs,*nsol; 2017 PetscInt orhs_size,osol_size; 2018 PetscErrorCode ierr; 2019 2020 PetscFunctionBegin; 2021 if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it"); 2022 else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS before"); 2023 2024 /* swap pointers */ 2025 orhs = mumps->id.redrhs; 2026 orhs_size = mumps->sizeredrhs; 2027 osol = mumps->schur_sol; 2028 osol_size = mumps->schur_sizesol; 2029 ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr); 2030 ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr); 2031 mumps->id.redrhs = (MumpsScalar*)nrhs; 2032 ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr); 2033 mumps->schur_sol = nsol; 2034 ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr); 2035 2036 /* solve Schur complement */ 2037 mumps->id.nrhs = 1; 2038 mumps->id.ICNTL(9) = 0; 2039 ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr); 2040 mumps->id.ICNTL(9) = 1; 2041 /* restore pointers */ 2042 ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr); 2043 ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr); 2044 mumps->id.redrhs = orhs; 2045 mumps->sizeredrhs = orhs_size; 2046 mumps->schur_sol = osol; 2047 mumps->schur_sizesol = osol_size; 2048 PetscFunctionReturn(0); 2049 } 2050 2051 /* -------------------------------------------------------------------------------------------*/ 2052 #undef __FUNCT__ 2053 #define __FUNCT__ 
"MatFactorSetSchurComplementSolverType_MUMPS" 2054 PetscErrorCode MatFactorSetSchurComplementSolverType_MUMPS(Mat F, PetscInt sym) 2055 { 2056 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2057 2058 PetscFunctionBegin; 2059 if (mumps->schur_factored && mumps->sym != mumps->schur_sym) { 2060 SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONG,"Cannot change the Schur solver! Schur complement data has been already factored"); 2061 } 2062 mumps->schur_sym = sym; 2063 PetscFunctionReturn(0); 2064 } 2065 2066 /* -------------------------------------------------------------------------------------------*/ 2067 #undef __FUNCT__ 2068 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS" 2069 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival) 2070 { 2071 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2072 2073 PetscFunctionBegin; 2074 mumps->id.ICNTL(icntl) = ival; 2075 PetscFunctionReturn(0); 2076 } 2077 2078 #undef __FUNCT__ 2079 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS" 2080 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival) 2081 { 2082 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2083 2084 PetscFunctionBegin; 2085 *ival = mumps->id.ICNTL(icntl); 2086 PetscFunctionReturn(0); 2087 } 2088 2089 #undef __FUNCT__ 2090 #define __FUNCT__ "MatMumpsSetIcntl" 2091 /*@ 2092 MatMumpsSetIcntl - Set MUMPS parameter ICNTL() 2093 2094 Logically Collective on Mat 2095 2096 Input Parameters: 2097 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2098 . icntl - index of MUMPS parameter array ICNTL() 2099 - ival - value of MUMPS ICNTL(icntl) 2100 2101 Options Database: 2102 . -mat_mumps_icntl_<icntl> <ival> 2103 2104 Level: beginner 2105 2106 References: 2107 . 
MUMPS Users' Guide 2108 2109 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2110 @*/ 2111 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival) 2112 { 2113 PetscErrorCode ierr; 2114 2115 PetscFunctionBegin; 2116 PetscValidType(F,1); 2117 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2118 PetscValidLogicalCollectiveInt(F,icntl,2); 2119 PetscValidLogicalCollectiveInt(F,ival,3); 2120 ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr); 2121 PetscFunctionReturn(0); 2122 } 2123 2124 #undef __FUNCT__ 2125 #define __FUNCT__ "MatMumpsGetIcntl" 2126 /*@ 2127 MatMumpsGetIcntl - Get MUMPS parameter ICNTL() 2128 2129 Logically Collective on Mat 2130 2131 Input Parameters: 2132 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2133 - icntl - index of MUMPS parameter array ICNTL() 2134 2135 Output Parameter: 2136 . ival - value of MUMPS ICNTL(icntl) 2137 2138 Level: beginner 2139 2140 References: 2141 . 
MUMPS Users' Guide 2142 2143 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2144 @*/ 2145 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival) 2146 { 2147 PetscErrorCode ierr; 2148 2149 PetscFunctionBegin; 2150 PetscValidType(F,1); 2151 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2152 PetscValidLogicalCollectiveInt(F,icntl,2); 2153 PetscValidIntPointer(ival,3); 2154 ierr = PetscUseMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 2155 PetscFunctionReturn(0); 2156 } 2157 2158 /* -------------------------------------------------------------------------------------------*/ 2159 #undef __FUNCT__ 2160 #define __FUNCT__ "MatMumpsSetCntl_MUMPS" 2161 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val) 2162 { 2163 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2164 2165 PetscFunctionBegin; 2166 mumps->id.CNTL(icntl) = val; 2167 PetscFunctionReturn(0); 2168 } 2169 2170 #undef __FUNCT__ 2171 #define __FUNCT__ "MatMumpsGetCntl_MUMPS" 2172 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val) 2173 { 2174 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2175 2176 PetscFunctionBegin; 2177 *val = mumps->id.CNTL(icntl); 2178 PetscFunctionReturn(0); 2179 } 2180 2181 #undef __FUNCT__ 2182 #define __FUNCT__ "MatMumpsSetCntl" 2183 /*@ 2184 MatMumpsSetCntl - Set MUMPS parameter CNTL() 2185 2186 Logically Collective on Mat 2187 2188 Input Parameters: 2189 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2190 . icntl - index of MUMPS parameter array CNTL() 2191 - val - value of MUMPS CNTL(icntl) 2192 2193 Options Database: 2194 . -mat_mumps_cntl_<icntl> <val> 2195 2196 Level: beginner 2197 2198 References: 2199 . 
MUMPS Users' Guide 2200 2201 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2202 @*/ 2203 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val) 2204 { 2205 PetscErrorCode ierr; 2206 2207 PetscFunctionBegin; 2208 PetscValidType(F,1); 2209 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2210 PetscValidLogicalCollectiveInt(F,icntl,2); 2211 PetscValidLogicalCollectiveReal(F,val,3); 2212 ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr); 2213 PetscFunctionReturn(0); 2214 } 2215 2216 #undef __FUNCT__ 2217 #define __FUNCT__ "MatMumpsGetCntl" 2218 /*@ 2219 MatMumpsGetCntl - Get MUMPS parameter CNTL() 2220 2221 Logically Collective on Mat 2222 2223 Input Parameters: 2224 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2225 - icntl - index of MUMPS parameter array CNTL() 2226 2227 Output Parameter: 2228 . val - value of MUMPS CNTL(icntl) 2229 2230 Level: beginner 2231 2232 References: 2233 . 
MUMPS Users' Guide 2234 2235 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2236 @*/ 2237 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val) 2238 { 2239 PetscErrorCode ierr; 2240 2241 PetscFunctionBegin; 2242 PetscValidType(F,1); 2243 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2244 PetscValidLogicalCollectiveInt(F,icntl,2); 2245 PetscValidRealPointer(val,3); 2246 ierr = PetscUseMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 2247 PetscFunctionReturn(0); 2248 } 2249 2250 #undef __FUNCT__ 2251 #define __FUNCT__ "MatMumpsGetInfo_MUMPS" 2252 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info) 2253 { 2254 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2255 2256 PetscFunctionBegin; 2257 *info = mumps->id.INFO(icntl); 2258 PetscFunctionReturn(0); 2259 } 2260 2261 #undef __FUNCT__ 2262 #define __FUNCT__ "MatMumpsGetInfog_MUMPS" 2263 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog) 2264 { 2265 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2266 2267 PetscFunctionBegin; 2268 *infog = mumps->id.INFOG(icntl); 2269 PetscFunctionReturn(0); 2270 } 2271 2272 #undef __FUNCT__ 2273 #define __FUNCT__ "MatMumpsGetRinfo_MUMPS" 2274 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo) 2275 { 2276 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2277 2278 PetscFunctionBegin; 2279 *rinfo = mumps->id.RINFO(icntl); 2280 PetscFunctionReturn(0); 2281 } 2282 2283 #undef __FUNCT__ 2284 #define __FUNCT__ "MatMumpsGetRinfog_MUMPS" 2285 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog) 2286 { 2287 Mat_MUMPS *mumps =(Mat_MUMPS*)F->data; 2288 2289 PetscFunctionBegin; 2290 *rinfog = mumps->id.RINFOG(icntl); 2291 PetscFunctionReturn(0); 2292 } 2293 2294 #undef __FUNCT__ 
2295 #define __FUNCT__ "MatMumpsGetInfo" 2296 /*@ 2297 MatMumpsGetInfo - Get MUMPS parameter INFO() 2298 2299 Logically Collective on Mat 2300 2301 Input Parameters: 2302 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2303 - icntl - index of MUMPS parameter array INFO() 2304 2305 Output Parameter: 2306 . ival - value of MUMPS INFO(icntl) 2307 2308 Level: beginner 2309 2310 References: 2311 . MUMPS Users' Guide 2312 2313 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2314 @*/ 2315 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival) 2316 { 2317 PetscErrorCode ierr; 2318 2319 PetscFunctionBegin; 2320 PetscValidType(F,1); 2321 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2322 PetscValidIntPointer(ival,3); 2323 ierr = PetscUseMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 2324 PetscFunctionReturn(0); 2325 } 2326 2327 #undef __FUNCT__ 2328 #define __FUNCT__ "MatMumpsGetInfog" 2329 /*@ 2330 MatMumpsGetInfog - Get MUMPS parameter INFOG() 2331 2332 Logically Collective on Mat 2333 2334 Input Parameters: 2335 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2336 - icntl - index of MUMPS parameter array INFOG() 2337 2338 Output Parameter: 2339 . ival - value of MUMPS INFOG(icntl) 2340 2341 Level: beginner 2342 2343 References: 2344 . 
MUMPS Users' Guide 2345 2346 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2347 @*/ 2348 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival) 2349 { 2350 PetscErrorCode ierr; 2351 2352 PetscFunctionBegin; 2353 PetscValidType(F,1); 2354 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2355 PetscValidIntPointer(ival,3); 2356 ierr = PetscUseMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr); 2357 PetscFunctionReturn(0); 2358 } 2359 2360 #undef __FUNCT__ 2361 #define __FUNCT__ "MatMumpsGetRinfo" 2362 /*@ 2363 MatMumpsGetRinfo - Get MUMPS parameter RINFO() 2364 2365 Logically Collective on Mat 2366 2367 Input Parameters: 2368 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2369 - icntl - index of MUMPS parameter array RINFO() 2370 2371 Output Parameter: 2372 . val - value of MUMPS RINFO(icntl) 2373 2374 Level: beginner 2375 2376 References: 2377 . 
MUMPS Users' Guide 2378 2379 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2380 @*/ 2381 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val) 2382 { 2383 PetscErrorCode ierr; 2384 2385 PetscFunctionBegin; 2386 PetscValidType(F,1); 2387 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2388 PetscValidRealPointer(val,3); 2389 ierr = PetscUseMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 2390 PetscFunctionReturn(0); 2391 } 2392 2393 #undef __FUNCT__ 2394 #define __FUNCT__ "MatMumpsGetRinfog" 2395 /*@ 2396 MatMumpsGetRinfog - Get MUMPS parameter RINFOG() 2397 2398 Logically Collective on Mat 2399 2400 Input Parameters: 2401 + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface 2402 - icntl - index of MUMPS parameter array RINFOG() 2403 2404 Output Parameter: 2405 . val - value of MUMPS RINFOG(icntl) 2406 2407 Level: beginner 2408 2409 References: 2410 . 
MUMPS Users' Guide 2411 2412 .seealso: MatGetFactor(), MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog() 2413 @*/ 2414 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val) 2415 { 2416 PetscErrorCode ierr; 2417 2418 PetscFunctionBegin; 2419 PetscValidType(F,1); 2420 if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix"); 2421 PetscValidRealPointer(val,3); 2422 ierr = PetscUseMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr); 2423 PetscFunctionReturn(0); 2424 } 2425 2426 /*MC 2427 MATSOLVERMUMPS - A matrix type providing direct solvers (LU and Cholesky) for 2428 distributed and sequential matrices via the external package MUMPS. 2429 2430 Works with MATAIJ and MATSBAIJ matrices 2431 2432 Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch to have PETSc installed with MUMPS 2433 2434 Use -pc_type cholesky or lu -pc_factor_mat_solver_package mumps to us this direct solver 2435 2436 Options Database Keys: 2437 + -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages 2438 . -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warning 2439 . -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host 2440 . -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4) 2441 . -mat_mumps_icntl_6 - ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) 2442 . -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis 2443 . -mat_mumps_icntl_8 - ICNTL(8): scaling strategy (-2 to 8 or 77) 2444 . -mat_mumps_icntl_10 - ICNTL(10): max num of refinements 2445 . -mat_mumps_icntl_11 - ICNTL(11): statistics related to an error analysis (via -ksp_view) 2446 . 
-mat_mumps_icntl_12  - ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)
.  -mat_mumps_icntl_13  - ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting
.  -mat_mumps_icntl_14  - ICNTL(14): percentage increase in the estimated working space
.  -mat_mumps_icntl_19  - ICNTL(19): computes the Schur complement
.  -mat_mumps_icntl_22  - ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)
.  -mat_mumps_icntl_23  - ICNTL(23): max size of the working memory (MB) that can be allocated per processor
.  -mat_mumps_icntl_24  - ICNTL(24): detection of null pivot rows (0 or 1)
.  -mat_mumps_icntl_25  - ICNTL(25): compute a solution of a deficient matrix and a null space basis
.  -mat_mumps_icntl_26  - ICNTL(26): drives the solution phase if a Schur complement matrix
.  -mat_mumps_icntl_28  - ICNTL(28): use 1 for sequential analysis and ICNTL(7) ordering, or 2 for parallel analysis and ICNTL(29) ordering
.  -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis
.  -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A)
.  -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization
.  -mat_mumps_icntl_33 - ICNTL(33): compute determinant
.  -mat_mumps_cntl_1  - CNTL(1): relative pivoting threshold
.  -mat_mumps_cntl_2  -  CNTL(2): stopping criterion of refinement
.  -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold
.  -mat_mumps_cntl_4 - CNTL(4): value for static pivoting
-  -mat_mumps_cntl_5 - CNTL(5): fixation for null pivots

   Level: beginner

    Notes: When a MUMPS factorization fails inside a KSP solve, for example with a KSP_DIVERGED_PCSETUP_FAILED, one can find the MUMPS information about the failure by calling
$          KSPGetPC(ksp,&pc);
$          PCFactorGetMatrix(pc,&mat);
$          MatMumpsGetInfo(mat,....);
$          MatMumpsGetInfog(mat,....); etc.
2473 Or you can run with -ksp_error_if_not_converged and the program will be stopped and the information printed in the error message. 2474 2475 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage, MatMumpsSetICntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog(), KSPGetPC(), PCGetFactor(), PCFactorGetMatrix() 2476 2477 M*/ 2478 2479 #undef __FUNCT__ 2480 #define __FUNCT__ "MatFactorGetSolverPackage_mumps" 2481 static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type) 2482 { 2483 PetscFunctionBegin; 2484 *type = MATSOLVERMUMPS; 2485 PetscFunctionReturn(0); 2486 } 2487 2488 /* MatGetFactor for Seq and MPI AIJ matrices */ 2489 #undef __FUNCT__ 2490 #define __FUNCT__ "MatGetFactor_aij_mumps" 2491 static PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F) 2492 { 2493 Mat B; 2494 PetscErrorCode ierr; 2495 Mat_MUMPS *mumps; 2496 PetscBool isSeqAIJ; 2497 2498 PetscFunctionBegin; 2499 /* Create the factorization matrix */ 2500 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr); 2501 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2502 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2503 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2504 ierr = MatSetUp(B);CHKERRQ(ierr); 2505 2506 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2507 2508 B->ops->view = MatView_MUMPS; 2509 B->ops->getinfo = MatGetInfo_MUMPS; 2510 2511 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr); 2512 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2513 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorInvertSchurComplement_C",MatFactorInvertSchurComplement_MUMPS);CHKERRQ(ierr); 2514 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2515 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSchurComplement_C",MatFactorGetSchurComplement_MUMPS);CHKERRQ(ierr); 2516 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplement_C",MatFactorSolveSchurComplement_MUMPS);CHKERRQ(ierr); 2517 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplementTranspose_C",MatFactorSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr); 2518 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorFactorizeSchurComplement_C",MatFactorFactorizeSchurComplement_MUMPS);CHKERRQ(ierr); 2519 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurComplementSolverType_C",MatFactorSetSchurComplementSolverType_MUMPS);CHKERRQ(ierr); 2520 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2521 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2522 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2523 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2524 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2525 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2526 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2527 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2528 2529 if (ftype == MAT_FACTOR_LU) { 2530 B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS; 2531 B->factortype = MAT_FACTOR_LU; 2532 if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij; 2533 
else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij; 2534 mumps->sym = 0; 2535 } else { 2536 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 2537 B->factortype = MAT_FACTOR_CHOLESKY; 2538 if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij; 2539 else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij; 2540 #if defined(PETSC_USE_COMPLEX) 2541 mumps->sym = 2; 2542 #else 2543 if (A->spd_set && A->spd) mumps->sym = 1; 2544 else mumps->sym = 2; 2545 #endif 2546 } 2547 2548 /* set solvertype */ 2549 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2550 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2551 2552 mumps->isAIJ = PETSC_TRUE; 2553 B->ops->destroy = MatDestroy_MUMPS; 2554 B->data = (void*)mumps; 2555 2556 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2557 2558 *F = B; 2559 PetscFunctionReturn(0); 2560 } 2561 2562 /* MatGetFactor for Seq and MPI SBAIJ matrices */ 2563 #undef __FUNCT__ 2564 #define __FUNCT__ "MatGetFactor_sbaij_mumps" 2565 static PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F) 2566 { 2567 Mat B; 2568 PetscErrorCode ierr; 2569 Mat_MUMPS *mumps; 2570 PetscBool isSeqSBAIJ; 2571 2572 PetscFunctionBegin; 2573 if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix"); 2574 if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead"); 2575 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr); 2576 /* Create the factorization matrix */ 2577 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2578 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2579 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2580 ierr = 
MatSetUp(B);CHKERRQ(ierr); 2581 2582 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 2583 if (isSeqSBAIJ) { 2584 mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij; 2585 } else { 2586 mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij; 2587 } 2588 2589 B->ops->getinfo = MatGetInfo_External; 2590 B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS; 2591 B->ops->view = MatView_MUMPS; 2592 2593 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr); 2594 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2595 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorInvertSchurComplement_C",MatFactorInvertSchurComplement_MUMPS);CHKERRQ(ierr); 2596 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2597 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSchurComplement_C",MatFactorGetSchurComplement_MUMPS);CHKERRQ(ierr); 2598 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplement_C",MatFactorSolveSchurComplement_MUMPS);CHKERRQ(ierr); 2599 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplementTranspose_C",MatFactorSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr); 2600 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorFactorizeSchurComplement_C",MatFactorFactorizeSchurComplement_MUMPS);CHKERRQ(ierr); 2601 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurComplementSolverType_C",MatFactorSetSchurComplementSolverType_MUMPS);CHKERRQ(ierr); 2602 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2603 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2604 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2605 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2606 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2607 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2608 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2609 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2610 2611 B->factortype = MAT_FACTOR_CHOLESKY; 2612 #if defined(PETSC_USE_COMPLEX) 2613 mumps->sym = 2; 2614 #else 2615 if (A->spd_set && A->spd) mumps->sym = 1; 2616 else mumps->sym = 2; 2617 #endif 2618 2619 /* set solvertype */ 2620 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2621 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2622 2623 mumps->isAIJ = PETSC_FALSE; 2624 B->ops->destroy = MatDestroy_MUMPS; 2625 B->data = (void*)mumps; 2626 2627 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2628 2629 *F = B; 2630 PetscFunctionReturn(0); 2631 } 2632 2633 #undef __FUNCT__ 2634 #define __FUNCT__ "MatGetFactor_baij_mumps" 2635 static PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F) 2636 { 2637 Mat B; 2638 PetscErrorCode ierr; 2639 Mat_MUMPS *mumps; 2640 PetscBool isSeqBAIJ; 2641 2642 PetscFunctionBegin; 2643 /* Create the factorization matrix */ 2644 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr); 2645 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2646 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 2647 ierr = PetscStrallocpy("mumps",&((PetscObject)B)->type_name);CHKERRQ(ierr); 2648 ierr = MatSetUp(B);CHKERRQ(ierr); 2649 2650 ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr); 
2651 if (ftype == MAT_FACTOR_LU) { 2652 B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS; 2653 B->factortype = MAT_FACTOR_LU; 2654 if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij; 2655 else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij; 2656 mumps->sym = 0; 2657 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n"); 2658 2659 B->ops->getinfo = MatGetInfo_External; 2660 B->ops->view = MatView_MUMPS; 2661 2662 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr); 2663 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);CHKERRQ(ierr); 2664 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorInvertSchurComplement_C",MatFactorInvertSchurComplement_MUMPS);CHKERRQ(ierr); 2665 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);CHKERRQ(ierr); 2666 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSchurComplement_C",MatFactorGetSchurComplement_MUMPS);CHKERRQ(ierr); 2667 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplement_C",MatFactorSolveSchurComplement_MUMPS);CHKERRQ(ierr); 2668 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplementTranspose_C",MatFactorSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr); 2669 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorFactorizeSchurComplement_C",MatFactorFactorizeSchurComplement_MUMPS);CHKERRQ(ierr); 2670 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurComplementSolverType_C",MatFactorSetSchurComplementSolverType_MUMPS);CHKERRQ(ierr); 2671 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr); 2672 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr); 2673 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr); 2674 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr); 2675 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr); 2676 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr); 2677 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr); 2678 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr); 2679 2680 /* set solvertype */ 2681 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 2682 ierr = PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);CHKERRQ(ierr); 2683 2684 mumps->isAIJ = PETSC_TRUE; 2685 B->ops->destroy = MatDestroy_MUMPS; 2686 B->data = (void*)mumps; 2687 2688 ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr); 2689 2690 *F = B; 2691 PetscFunctionReturn(0); 2692 } 2693 2694 #undef __FUNCT__ 2695 #define __FUNCT__ "MatSolverPackageRegister_MUMPS" 2696 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void) 2697 { 2698 PetscErrorCode ierr; 2699 2700 PetscFunctionBegin; 2701 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2702 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2703 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2704 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2705 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 2706 ierr = 
MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2707 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr); 2708 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2709 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr); 2710 ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr); 2711 PetscFunctionReturn(0); 2712 } 2713 2714