/*
   Provides an interface to the SuperLU_DIST sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

typedef struct {
  int_t                  nprow,npcol,*row,*col;
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
#else
  PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  ierr = PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

  PetscFunctionBegin;
  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
    if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr=NULL;
  int              info;   /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);CHKERRQ(ierr);

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
    /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
  ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,1,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,1,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);   /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
  lu->matsolve_iscalled    = PETSC_TRUE;
  lu->matmatsolve_iscalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info;   /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was probably not created for a MatMatSolve(),
       so destroy it and create a new SOLVEstruct.
       Otherwise it may lead to memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);

#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif

  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);   /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*
  input:
   F:     numeric Cholesky factor
  output:
   nneg:  total number of negative pivots
   nzero: total number of zero pivots
   npos:  (global dimension of F) - nneg - nzero
*/
static PetscErrorCode MatGetInertia_SuperLU_DIST(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscScalar      *diagU=NULL;
  PetscInt         M,i,neg=0,zero=0,pos=0;
  PetscReal        r;

  PetscFunctionBegin;
  if (!F->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix factor F is not assembled");
  if (lu->options.RowPerm != NOROWPERM) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set NOROWPERM");
  ierr = MatGetSize(F,&M,NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(M,&diagU);CHKERRQ(ierr);
  ierr = MatSuperluDistGetDiagU(F,diagU);CHKERRQ(ierr);
  for (i=0; i<M; i++) {
#if defined(PETSC_USE_COMPLEX)
    r = PetscImaginaryPart(diagU[i])/10.0;
    if (r< -PETSC_MACHINE_EPSILON || r>PETSC_MACHINE_EPSILON) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"diagU[%d]=%g + i %g is non-real",i,PetscRealPart(diagU[i]),r*10.0);
    r = PetscRealPart(diagU[i]);
#else
    r = diagU[i];
#endif
    if (r > 0) {
      pos++;
    } else if (r < 0) {
      neg++;
    } else zero++;
  }

  ierr = PetscFree(diagU);CHKERRQ(ierr);
  if (nneg)  *nneg  = neg;
  if (nzero) *nzero = zero;
  if (npos)  *npos  = pos;
  PetscFunctionReturn(0);
}
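/*
   Illustrative usage sketch (not part of this file's compiled code) of how an application
   could query the inertia computed by the routine above. It assumes a Cholesky factorization
   obtained through a PC object named "pc" that uses this package, and that the row
   permutation was disabled, e.g. with -mat_superlu_dist_rowperm NOROWPERM:

      Mat      F;
      PetscInt nneg,nzero,npos;

      PCFactorGetMatrix(pc,&F);              // factored matrix created by this package
      MatGetInertia(F,&nneg,&nzero,&npos);   // dispatches to MatGetInertia_SuperLU_DIST()
*/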
static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_SeqAIJ       *aa=NULL,*bb=NULL;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai=NULL,*aj=NULL,*bi=NULL,*bj=NULL,nz,rstart,*garray=NULL,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj=NULL,*ajj=NULL;
  int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av=NULL, *bv=NULL;
#else
  double           *av=NULL, *bv=NULL;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (size == 1) {
    aa     = (Mat_SeqAIJ*)A->data;
    rstart = 0;
    nz     = aa->nz;
  } else {
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;
  }

  /* Allocations for A_sup */
  if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
  } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
    if (lu->FactPattern == SamePattern_SameRowPerm) {
      lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
    } else if (lu->FactPattern == SamePattern) {
      PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate L and U matrices. */
      lu->options.Fact = SamePattern;
    } else if (lu->FactPattern == DOFACT) {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
      lu->options.Fact = DOFACT;

#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"options.Fact must be one of SamePattern SamePattern_SameRowPerm DOFACT");
    }
  }

  /* Copy AIJ matrix to superlu_dist matrix */
  if (size == 1) { /* A_sup has the same SeqAIJ format as the input matrix */
    ai = aa->i; aj = aa->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
#else
    av = aa->a;
#endif

    ierr = PetscArraycpy(lu->row,ai,(m+1));CHKERRQ(ierr);
    ierr = PetscArraycpy(lu->col,aj,nz);CHKERRQ(ierr);
    ierr = PetscArraycpy(lu->val,av,nz);CHKERRQ(ierr);
  } else {
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      if (aj) {
        ajj = aj + ai[i]; /* ptr to the beginning of this row */
      } else {
        ajj = NULL;
      }
      bjj = bj + bi[i];

      /* B part, smaller col index */
      if (aj) {
        colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      } else { /* SuperLU_DIST does not require the matrix to have diagonal entries, so aj==NULL is allowed */
        colA_start = rstart;
      }
      jB = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
  }

  /* Create and setup A_sup */
  if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        ierr = PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);CHKERRQ(ierr);
      } else if (sinfo > lu->A_sup.ncol) {
        /*
          number of bytes allocated when memory allocation
          failure occurred, plus A->ncol.
        */
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
        ierr = PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);CHKERRQ(ierr);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  F->assembled     = PETSC_TRUE;
  F->preallocated  = PETSC_TRUE;
  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this function only */
  PetscFunctionReturn(0);
}

/* Note that the PETSc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscInt         M = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  F->ops->getinertia      = NULL;

  if (A->symmetric || A->hermitian) {
    F->ops->getinertia = MatGetInertia_SuperLU_DIST;
  }
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!A->symmetric) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Input matrix must be symmetric\n");
  ierr = MatLUFactorSymbolic_SuperLU_DIST(F,A,r,r,info);CHKERRQ(ierr);
  F->ops->choleskyfactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->data;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);

  switch (options.RowPerm) {
  case NOROWPERM:
    ierr = PetscViewerASCIIPrintf(viewer," Row permutation NOROWPERM\n");CHKERRQ(ierr);
    break;
  case LargeDiag_MC64:
    ierr = PetscViewerASCIIPrintf(viewer," Row permutation LargeDiag_MC64\n");CHKERRQ(ierr);
    break;
  case LargeDiag_AWPM:
    ierr = PetscViewerASCIIPrintf(viewer," Row permutation LargeDiag_AWPM\n");CHKERRQ(ierr);
    break;
  case MY_PERMR:
    ierr = PetscViewerASCIIPrintf(viewer," Row permutation MY_PERMR\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
  }

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  } else if (lu->FactPattern == DOFACT) {
    ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization DOFACT\n");CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatView_Info_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"NOROWPERM","LargeDiag_MC64","LargeDiag_AWPM","MY_PERMR"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
  PetscBool              set;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);
  B->ops->getinfo = MatGetInfo_External;
  B->ops->view    = MatView_SuperLU_DIST;
  B->ops->destroy = MatDestroy_SuperLU_DIST;

  if (ftype == MAT_FACTOR_LU) {
    B->factortype = MAT_FACTOR_LU;
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  } else {
    B->factortype = MAT_FACTOR_CHOLESKY;
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SuperLU_DIST;
  }

  /* set solvertype */
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);CHKERRQ(ierr);

  ierr = PetscNewLog(B,&lu);CHKERRQ(ierr);
  B->data = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag_MC64;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO;  -- holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  lu->nprow = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->nprow) lu->nprow = 1;
  while (lu->nprow > 0) {
    lu->npcol = (int_t) (size/lu->nprow);
    if (size == lu->nprow * lu->npcol) break;
    lu->nprow--;
  }

  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal to nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && !flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,4,rowperm[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = NOROWPERM;
      break;
    case 1:
      options.RowPerm = LargeDiag_MC64;
      break;
    case 2:
      options.RowPerm = LargeDiag_AWPM;
      break;
    case 3:
      options.RowPerm = MY_PERMR;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg && size>1) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
  }

  lu->FactPattern = SamePattern;
  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);CHKERRQ(ierr);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_superlu_dist);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  ierr = MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  ierr = MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  ierr = MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_type superlu_dist to use this direct solver

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <NOROWPERM,LargeDiag_MC64,LargeDiag_AWPM,MY_PERMR> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm DOFACT
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

  Level: beginner

.seealso: PCLU, PCFactorSetMatSolverType(), MatSolverType
M*/
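/*
   Illustrative usage sketch (not part of this file's compiled code): selecting this solver
   from application code rather than through the options database. The objects "A", "b", and
   "x" are placeholders for an assembled AIJ matrix and vectors created by the application.

      KSP ksp;
      PC  pc;

      KSPCreate(PETSC_COMM_WORLD,&ksp);
      KSPSetOperators(ksp,A,A);
      KSPSetType(ksp,KSPPREONLY);                           // apply the factorization as a direct solver
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCLU);                                   // or PCCHOLESKY for symmetric matrices
      PCFactorSetMatSolverType(pc,MATSOLVERSUPERLU_DIST);
      KSPSetFromOptions(ksp);                               // lets -mat_superlu_dist_* options take effect
      KSPSolve(ksp,b,x);
      KSPDestroy(&ksp);

   The equivalent command-line selection is
      -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist
*/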