/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL      - The sparse matrix and right-hand side are initially stored entirely on process 0; this mode could more accurately be called "centralized".
    DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
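/*
   Usage sketch (illustrative, not compiled into this file): the input mode maps to
   the -mat_superlu_dist_matinput option registered in MatGetFactor_aij_superlu_dist()
   below, e.g.

     mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_package superlu_dist \
                        -mat_superlu_dist_matinput DISTRIBUTED

   GLOBAL gathers the whole matrix and right-hand side onto each process and calls the
   p[dz]gssvx_ABglobal() drivers; DISTRIBUTED keeps the local rows on each rank and
   calls p[dz]gssvx(). The application name ./app is a placeholder.
*/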
typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex        *val;
#else
  double               *val;
#endif
  PetscBool            matsolve_iscalled,matmatsolve_iscalled;
  PetscBool            CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
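  /*
     Note (added commentary): in GLOBAL mode every process scatters the entire
     right-hand side into its own sequential vector x_seq of length N below, so
     this path costs O(N) memory per process; DISTRIBUTED mode works directly
     on the locally owned part of the vectors.
  */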
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           *berr;
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern of SOLVEstruct was likely not created for this MatMatSolve(),
       thus destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrhs,&berr);CHKERRQ(ierr); /* berr must hold one backward error per right-hand side when iterative refinement is enabled */

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  ierr = PetscFree(berr);CHKERRQ(ierr);
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert Petsc NR matrix to SuperLU_DIST NC.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
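    /*
       The loop below merges the diagonal block (A, local column indices) and the
       off-diagonal block (B, compressed indices mapped through garray) of the
       MPIAIJ matrix into one locally owned block of rows with ascending GLOBAL
       column indices. Worked example (illustrative numbers only): with rstart=5,
       a row whose A part has local columns {0,2} and whose B part has global
       columns {1,10} is emitted as 1 (B part below colA_start=5), then 5,7
       (A part), then 10 (remaining B part).
    */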
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }
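  /*
     Note (added commentary): the drivers below are called with a NULL right-hand
     side and nrhs = 0, which SuperLU_DIST interprets as "factor only, skip the
     triangular solve"; the solves happen later in MatSolve()/MatMatSolve().
  */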
  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  PetscFunctionReturn(0);
}

/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(M, N, &lu->LUstruct));

  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;

  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}
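/*
   Note (added commentary): the symbolic phase above does no ordering or symbolic
   analysis itself; it only creates the process grid and the SuperLU_DIST data
   structures. The ordering and symbolic work are performed inside p[dz]gssvx()
   on the first call to MatLUFactorNumeric_SuperLU_DIST(), while lu->options.Fact
   is still DOFACT.
*/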
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};
  PetscBool         set;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);

  B->factortype = MAT_FACTOR_LU;

  ierr     = PetscNewLog(B,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO;  - holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = (int_t) (size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
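  /* Worked example of the default grid (illustrative numbers only): size=6 gives
     npcol = round(sqrt(6)) = 2 and nprow = 3, i.e. a 3x2 grid; size=7 (prime)
     decrements npcol until it divides size, ending with a 7x1 grid. */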
  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && !flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && !flg) options.ReplaceTinyPivot = NO;

  options.ParSymbFact = NO;

  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    ierr = PetscPrintf(PETSC_COMM_SELF,"parsymbfact needs PARMETIS\n");CHKERRQ(ierr);
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;

  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }
  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE,&flg,&set);CHKERRQ(ierr);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolverPackageRegister_SuperLU_DIST"
PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_SuperLU_DIST"
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
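/*
   Usage sketch (illustrative, not compiled into this file): a typical way to reach
   this solver from a KSP, where ./ex2 stands for any PETSc example or application:

     mpiexec -n 4 ./ex2 -ksp_type preonly -pc_type lu \
             -pc_factor_mat_solver_package superlu_dist \
             -mat_superlu_dist_colperm PARMETIS -mat_superlu_dist_statprint

   or, equivalently, in code:

     KSP ksp;  PC pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPSetType(ksp,KSPPREONLY);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST);
*/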
"NATURAL" : "LargeDiag");CHKERRQ(ierr); 650 651 switch (options.ColPerm) { 652 case NATURAL: 653 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 654 break; 655 case MMD_AT_PLUS_A: 656 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 657 break; 658 case MMD_ATA: 659 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 660 break; 661 case METIS_AT_PLUS_A: 662 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 663 break; 664 case PARMETIS: 665 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 666 break; 667 default: 668 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 669 } 670 671 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 672 673 if (lu->FactPattern == SamePattern) { 674 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 675 } else { 676 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 677 } 678 PetscFunctionReturn(0); 679 } 680 681 #undef __FUNCT__ 682 #define __FUNCT__ "MatView_SuperLU_DIST" 683 PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 684 { 685 PetscErrorCode ierr; 686 PetscBool iascii; 687 PetscViewerFormat format; 688 689 PetscFunctionBegin; 690 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 691 if (iascii) { 692 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 693 if (format == PETSC_VIEWER_ASCII_INFO) { 694 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 695 } 696 } 697 PetscFunctionReturn(0); 698 } 699 700 701 /*MC 702 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 703 704 Works with AIJ matrices 705 706 Options Database Keys: 707 + -mat_superlu_dist_r <n> - number of rows in processor partition 708 . -mat_superlu_dist_c <n> - number of columns in processor partition 709 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 710 . -mat_superlu_dist_equil - equilibrate the matrix 711 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 712 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 713 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 714 . -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm 715 . -mat_superlu_dist_iterrefine - use iterative refinement 716 - -mat_superlu_dist_statprint - print factorization information 717 718 Level: beginner 719 720 .seealso: PCLU 721 722 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 723 724 M*/ 725 726