1 2 /* 3 Provides an interface to the SuperLU_DIST_2.2 sparse solver 4 */ 5 6 #include <../src/mat/impls/aij/seq/aij.h> 7 #include <../src/mat/impls/aij/mpi/mpiaij.h> 8 #if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */ 9 #include <stdlib.h> 10 #endif 11 12 #if defined(PETSC_USE_64BIT_INDICES) 13 /* ugly SuperLU_Dist variable telling it to use long long int */ 14 #define _LONGINT 15 #endif 16 17 EXTERN_C_BEGIN 18 #if defined(PETSC_USE_COMPLEX) 19 #include <superlu_zdefs.h> 20 #else 21 #include <superlu_ddefs.h> 22 #endif 23 EXTERN_C_END 24 25 /* 26 GLOBAL - The sparse matrix and right hand side are all stored initially on process 0. Should be called centralized 27 DISTRIBUTED - The sparse matrix and right hand size are initially stored across the entire MPI communicator. 28 */ 29 typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode; 30 const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0}; 31 32 typedef struct { 33 int_t nprow,npcol,*row,*col; 34 gridinfo_t grid; 35 superlu_dist_options_t options; 36 SuperMatrix A_sup; 37 ScalePermstruct_t ScalePermstruct; 38 LUstruct_t LUstruct; 39 int StatPrint; 40 SuperLU_MatInputMode MatInputMode; 41 SOLVEstruct_t SOLVEstruct; 42 fact_t FactPattern; 43 MPI_Comm comm_superlu; 44 #if defined(PETSC_USE_COMPLEX) 45 doublecomplex *val; 46 #else 47 double *val; 48 #endif 49 PetscBool matsolve_iscalled,matmatsolve_iscalled; 50 PetscBool CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */ 51 } Mat_SuperLU_DIST; 52 53 #undef __FUNCT__ 54 #define __FUNCT__ "MatDestroy_SuperLU_DIST" 55 static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A) 56 { 57 PetscErrorCode ierr; 58 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data; 59 60 PetscFunctionBegin; 61 if (lu->CleanUpSuperLU_Dist) { 62 /* Deallocate SuperLU_DIST storage */ 63 if (lu->MatInputMode == GLOBAL) { 64 PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup)); 65 } else { 66 PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup)); 67 if (lu->options.SolveInitialized) { 68 #if defined(PETSC_USE_COMPLEX) 69 PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct)); 70 #else 71 PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct)); 72 #endif 73 } 74 } 75 PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct)); 76 PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct)); 77 PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct)); 78 79 /* Release the SuperLU_DIST process grid. */ 80 PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid)); 81 ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr); 82 } 83 ierr = PetscFree(A->data);CHKERRQ(ierr); 84 PetscFunctionReturn(0); 85 } 86 87 #undef __FUNCT__ 88 #define __FUNCT__ "MatSolve_SuperLU_DIST" 89 static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x) 90 { 91 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data; 92 PetscErrorCode ierr; 93 PetscMPIInt size; 94 PetscInt m=A->rmap->n,M=A->rmap->N,N=A->cmap->N; 95 SuperLUStat_t stat; 96 double berr[1]; 97 PetscScalar *bptr; 98 PetscInt nrhs=1; 99 Vec x_seq; 100 IS iden; 101 VecScatter scat; 102 int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */ 103 static PetscBool cite = PETSC_FALSE; 104 105 PetscFunctionBegin; 106 ierr = PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);CHKERRQ(ierr); 107 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 108 if (size > 1 && lu->MatInputMode == GLOBAL) { 109 /* global mat input, convert b to x_seq */ 110 ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr); 111 ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr); 112 ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr); 113 ierr = ISDestroy(&iden);CHKERRQ(ierr); 114 115 ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 116 ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 117 ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr); 118 } else { /* size==1 || distributed mat input */ 119 if (lu->options.SolveInitialized && !lu->matsolve_iscalled) { 120 /* see comments in MatMatSolve() */ 121 #if defined(PETSC_USE_COMPLEX) 122 PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct)); 123 #else 124 PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct)); 125 #endif 126 lu->options.SolveInitialized = NO; 127 } 128 ierr = VecCopy(b_mpi,x);CHKERRQ(ierr); 129 ierr = VecGetArray(x,&bptr);CHKERRQ(ierr); 130 } 131 132 if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact mush equal FACTORED"); 133 134 PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */ 135 if (lu->MatInputMode == GLOBAL) { 136 #if defined(PETSC_USE_COMPLEX) 137 PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info)); 138 #else 139 PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info)); 140 #endif 141 } else { /* distributed mat input */ 142 #if defined(PETSC_USE_COMPLEX) 143 PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 144 #else 145 PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 146 #endif 147 } 148 if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info); 149 150 if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 151 PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat)); 152 153 if (size > 1 && lu->MatInputMode == GLOBAL) { 154 /* convert seq x to mpi x */ 155 ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr); 156 ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 157 ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 158 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 159 ierr = VecDestroy(&x_seq);CHKERRQ(ierr); 160 } else { 161 ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr); 162 163 lu->matsolve_iscalled = PETSC_TRUE; 164 lu->matmatsolve_iscalled = PETSC_FALSE; 165 } 166 PetscFunctionReturn(0); 167 } 168 169 #undef __FUNCT__ 170 #define __FUNCT__ "MatMatSolve_SuperLU_DIST" 171 static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X) 172 { 173 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data; 174 PetscErrorCode ierr; 175 PetscMPIInt size; 176 PetscInt M=A->rmap->N,m=A->rmap->n,nrhs; 177 SuperLUStat_t stat; 178 double berr[1]; 179 PetscScalar *bptr; 180 int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */ 181 PetscBool flg; 182 183 PetscFunctionBegin; 184 if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact mush equal FACTORED"); 185 ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 186 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix"); 187 ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 188 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix"); 189 190 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 191 if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size); 192 /* size==1 or distributed mat input */ 193 if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) { 194 /* communication pattern of SOLVEstruct is unlikely created for matmatsolve, 195 thus destroy it and create a new SOLVEstruct. 196 Otherwise it may result in memory corruption or incorrect solution 197 See src/mat/examples/tests/ex125.c */ 198 #if defined(PETSC_USE_COMPLEX) 199 PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct)); 200 #else 201 PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct)); 202 #endif 203 lu->options.SolveInitialized = NO; 204 } 205 ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 206 207 ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr); 208 209 PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */ 210 ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr); 211 if (lu->MatInputMode == GLOBAL) { /* size == 1 */ 212 #if defined(PETSC_USE_COMPLEX) 213 PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info)); 214 #else 215 PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info)); 216 #endif 217 } else { /* distributed mat input */ 218 #if defined(PETSC_USE_COMPLEX) 219 PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 220 #else 221 PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 222 #endif 223 } 224 if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info); 225 ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr); 226 227 if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 228 PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat)); 229 lu->matsolve_iscalled = PETSC_FALSE; 230 lu->matmatsolve_iscalled = PETSC_TRUE; 231 PetscFunctionReturn(0); 232 } 233 234 235 #undef __FUNCT__ 236 #define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST" 237 static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info) 238 { 239 Mat *tseq,A_seq = NULL; 240 Mat_SeqAIJ *aa,*bb; 241 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->data; 242 PetscErrorCode ierr; 243 PetscInt M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray, 244 m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj; 245 int sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */ 246 PetscMPIInt size; 247 SuperLUStat_t stat; 248 double *berr=0; 249 IS isrow; 250 #if defined(PETSC_USE_COMPLEX) 251 doublecomplex *av, *bv; 252 #else 253 double *av, *bv; 254 #endif 255 256 PetscFunctionBegin; 257 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 258 259 if (lu->MatInputMode == GLOBAL) { /* global mat input */ 260 if (size > 1) { /* convert mpi A to seq mat A */ 261 ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr); 262 ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr); 263 ierr = ISDestroy(&isrow);CHKERRQ(ierr); 264 265 A_seq = *tseq; 266 ierr = PetscFree(tseq);CHKERRQ(ierr); 267 aa = (Mat_SeqAIJ*)A_seq->data; 268 } else { 269 PetscBool flg; 270 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 271 if (flg) { 272 Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data; 273 A = At->A; 274 } 275 aa = (Mat_SeqAIJ*)A->data; 276 } 277 278 /* Convert Petsc NR matrix to SuperLU_DIST NC. 279 Note: memories of lu->val, col and row are allocated by CompRow_to_CompCol_dist()! */ 280 if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */ 281 PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup)); 282 if (lu->FactPattern == SamePattern_SameRowPerm) { 283 lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ 284 } else { /* lu->FactPattern == SamePattern */ 285 PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); 286 lu->options.Fact = SamePattern; 287 } 288 } 289 #if defined(PETSC_USE_COMPLEX) 290 PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row)); 291 #else 292 PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row)); 293 #endif 294 295 /* Create compressed column matrix A_sup. */ 296 #if defined(PETSC_USE_COMPLEX) 297 PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE)); 298 #else 299 PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE)); 300 #endif 301 } else { /* distributed mat input */ 302 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 303 aa=(Mat_SeqAIJ*)(mat->A)->data; 304 bb=(Mat_SeqAIJ*)(mat->B)->data; 305 ai=aa->i; aj=aa->j; 306 bi=bb->i; bj=bb->j; 307 #if defined(PETSC_USE_COMPLEX) 308 av=(doublecomplex*)aa->a; 309 bv=(doublecomplex*)bb->a; 310 #else 311 av=aa->a; 312 bv=bb->a; 313 #endif 314 rstart = A->rmap->rstart; 315 nz = aa->nz + bb->nz; 316 garray = mat->garray; 317 318 if (lu->options.Fact == DOFACT) { /* first numeric factorization */ 319 #if defined(PETSC_USE_COMPLEX) 320 PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row)); 321 #else 322 PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row)); 323 #endif 324 } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */ 325 SUPERLU_FREE((void*)lu->A_sup.Store); 326 if (lu->FactPattern == SamePattern_SameRowPerm) { 327 lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ 328 } else { 329 PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */ 330 lu->options.Fact = SamePattern; 331 } 332 } 333 nz = 0; 334 for (i=0; i<m; i++) { 335 lu->row[i] = nz; 336 countA = ai[i+1] - ai[i]; 337 countB = bi[i+1] - bi[i]; 338 if (aj) { 339 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 340 } else { 341 ajj = NULL; 342 } 343 bjj = bj + bi[i]; 344 345 /* B part, smaller col index */ 346 if (aj) { 347 colA_start = rstart + ajj[0]; /* the smallest global col index of A */ 348 } else { /* superlu_dist does not require matrix has diagonal entries, thus aj=NULL would work */ 349 colA_start = rstart; 350 } 351 jB = 0; 352 for (j=0; j<countB; j++) { 353 jcol = garray[bjj[j]]; 354 if (jcol > colA_start) { 355 jB = j; 356 break; 357 } 358 lu->col[nz] = jcol; 359 lu->val[nz++] = *bv++; 360 if (j==countB-1) jB = countB; 361 } 362 363 /* A part */ 364 for (j=0; j<countA; j++) { 365 lu->col[nz] = rstart + ajj[j]; 366 lu->val[nz++] = *av++; 367 } 368 369 /* B part, larger col index */ 370 for (j=jB; j<countB; j++) { 371 lu->col[nz] = garray[bjj[j]]; 372 lu->val[nz++] = *bv++; 373 } 374 } 375 lu->row[m] = nz; 376 #if defined(PETSC_USE_COMPLEX) 377 PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE)); 378 #else 379 PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE)); 380 #endif 381 } 382 383 /* Factor the matrix. */ 384 PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */ 385 if (lu->MatInputMode == GLOBAL) { /* global mat input */ 386 #if defined(PETSC_USE_COMPLEX) 387 PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo)); 388 #else 389 PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo)); 390 #endif 391 } else { /* distributed mat input */ 392 #if defined(PETSC_USE_COMPLEX) 393 PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); 394 #else 395 PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); 396 #endif 397 } 398 399 if (sinfo > 0) { 400 if (A->erroriffailure) { 401 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo); 402 } else { 403 if (sinfo <= lu->A_sup.ncol) { 404 F->errortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 405 ierr = PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);CHKERRQ(ierr); 406 } else if (sinfo > lu->A_sup.ncol) { 407 /* 408 number of bytes allocated when memory allocation 409 failure occurred, plus A->ncol. 410 */ 411 F->errortype = MAT_FACTOR_OUTMEMORY; 412 ierr = PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);CHKERRQ(ierr); 413 } 414 } 415 } else if (sinfo < 0) { 416 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo); 417 } 418 419 if (lu->MatInputMode == GLOBAL && size > 1) { 420 ierr = MatDestroy(&A_seq);CHKERRQ(ierr); 421 } 422 423 if (lu->options.PrintStat) { 424 PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 425 } 426 PStatFree(&stat); 427 (F)->assembled = PETSC_TRUE; 428 (F)->preallocated = PETSC_TRUE; 429 lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */ 430 PetscFunctionReturn(0); 431 } 432 433 /* Note the Petsc r and c permutations are ignored */ 434 #undef __FUNCT__ 435 #define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST" 436 static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 437 { 438 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data; 439 PetscInt M = A->rmap->N,N=A->cmap->N; 440 441 PetscFunctionBegin; 442 /* Initialize the SuperLU process grid. */ 443 PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid)); 444 445 /* Initialize ScalePermstruct and LUstruct. */ 446 PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct)); 447 PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct)); 448 F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST; 449 F->ops->solve = MatSolve_SuperLU_DIST; 450 F->ops->matsolve = MatMatSolve_SuperLU_DIST; 451 lu->CleanUpSuperLU_Dist = PETSC_TRUE; 452 PetscFunctionReturn(0); 453 } 454 455 #undef __FUNCT__ 456 #define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist" 457 static PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type) 458 { 459 PetscFunctionBegin; 460 *type = MATSOLVERSUPERLU_DIST; 461 PetscFunctionReturn(0); 462 } 463 464 #undef __FUNCT__ 465 #define __FUNCT__ "MatFactorInfo_SuperLU_DIST" 466 static PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer) 467 { 468 Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)A->data; 469 superlu_dist_options_t options; 470 PetscErrorCode ierr; 471 472 PetscFunctionBegin; 473 /* check if matrix is superlu_dist type */ 474 if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0); 475 476 options = lu->options; 477 ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr); 478 ierr = PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr); 479 ierr = PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr); 480 ierr = PetscViewerASCIIPrintf(viewer," Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr); 481 ierr = PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr); 482 ierr = PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr); 483 ierr = PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr); 484 ierr = PetscViewerASCIIPrintf(viewer," Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr); 485 486 switch (options.ColPerm) { 487 case NATURAL: 488 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 489 break; 490 case MMD_AT_PLUS_A: 491 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 492 break; 493 case MMD_ATA: 494 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 495 break; 496 case METIS_AT_PLUS_A: 497 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 498 break; 499 case PARMETIS: 500 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 501 break; 502 default: 503 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 504 } 505 506 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 507 508 if (lu->FactPattern == SamePattern) { 509 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 510 } else { 511 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 512 } 513 PetscFunctionReturn(0); 514 } 515 516 #undef __FUNCT__ 517 #define __FUNCT__ "MatView_SuperLU_DIST" 518 static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 519 { 520 PetscErrorCode ierr; 521 PetscBool iascii; 522 PetscViewerFormat format; 523 524 PetscFunctionBegin; 525 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 526 if (iascii) { 527 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 528 if (format == PETSC_VIEWER_ASCII_INFO) { 529 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 530 } 531 } 532 PetscFunctionReturn(0); 533 } 534 535 #undef __FUNCT__ 536 #define __FUNCT__ "MatGetFactor_aij_superlu_dist" 537 static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F) 538 { 539 Mat B; 540 Mat_SuperLU_DIST *lu; 541 PetscErrorCode ierr; 542 PetscInt M=A->rmap->N,N=A->cmap->N,indx; 543 PetscMPIInt size; 544 superlu_dist_options_t options; 545 PetscBool flg; 546 const char *colperm[] = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"}; 547 const char *rowperm[] = {"LargeDiag","NATURAL"}; 548 const char *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"}; 549 PetscBool set; 550 551 PetscFunctionBegin; 552 /* Create the factorization matrix */ 553 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 554 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr); 555 ierr = PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);CHKERRQ(ierr); 556 ierr = MatSetUp(B);CHKERRQ(ierr); 557 B->ops->getinfo = MatGetInfo_External; 558 B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST; 559 B->ops->view = MatView_SuperLU_DIST; 560 B->ops->destroy = MatDestroy_SuperLU_DIST; 561 562 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr); 563 564 B->factortype = MAT_FACTOR_LU; 565 566 /* set solvertype */ 567 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 568 ierr = PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);CHKERRQ(ierr); 569 570 ierr = PetscNewLog(B,&lu);CHKERRQ(ierr); 571 B->data = lu; 572 573 /* Set the default input options: 574 options.Fact = DOFACT; 575 options.Equil = YES; 576 options.ParSymbFact = NO; 577 options.ColPerm = METIS_AT_PLUS_A; 578 options.RowPerm = LargeDiag; 579 options.ReplaceTinyPivot = YES; 580 options.IterRefine = DOUBLE; 581 options.Trans = NOTRANS; 582 options.SolveInitialized = NO; -hold the communication pattern used MatSolve() and MatMatSolve() 583 options.RefineInitialized = NO; 584 options.PrintStat = YES; 585 */ 586 set_default_options_dist(&options); 587 588 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr); 589 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 590 /* Default num of process columns and rows */ 591 lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size)); 592 if (!lu->npcol) lu->npcol = 1; 593 while (lu->npcol > 0) { 594 lu->nprow = (int_t) (size/lu->npcol); 595 if (size == lu->nprow * lu->npcol) break; 596 lu->npcol--; 597 } 598 599 ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr); 600 ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);CHKERRQ(ierr); 601 ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);CHKERRQ(ierr); 602 if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal to nprow %d * npcol %d",size,lu->nprow,lu->npcol); 603 604 lu->MatInputMode = DISTRIBUTED; 605 606 ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr); 607 if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL; 608 609 ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr); 610 if (set && !flg) options.Equil = NO; 611 612 ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr); 613 if (flg) { 614 switch (indx) { 615 case 0: 616 options.RowPerm = LargeDiag; 617 break; 618 case 1: 619 options.RowPerm = NOROWPERM; 620 break; 621 } 622 } 623 624 ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr); 625 if (flg) { 626 switch (indx) { 627 case 0: 628 options.ColPerm = NATURAL; 629 break; 630 case 1: 631 options.ColPerm = MMD_AT_PLUS_A; 632 break; 633 case 2: 634 options.ColPerm = MMD_ATA; 635 break; 636 case 3: 637 options.ColPerm = METIS_AT_PLUS_A; 638 break; 639 case 4: 640 options.ColPerm = PARMETIS; /* only works for np>1 */ 641 break; 642 default: 643 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 644 } 645 } 646 647 options.ReplaceTinyPivot = NO; 648 ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr); 649 if (set && flg) options.ReplaceTinyPivot = YES; 650 651 options.ParSymbFact = NO; 652 ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);CHKERRQ(ierr); 653 if (set && flg && size>1) { 654 if (lu->MatInputMode == GLOBAL) { 655 #if defined(PETSC_USE_INFO) 656 ierr = PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");CHKERRQ(ierr); 657 #endif 658 } else { 659 #if defined(PETSC_HAVE_PARMETIS) 660 options.ParSymbFact = YES; 661 options.ColPerm = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */ 662 #else 663 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS"); 664 #endif 665 } 666 } 667 668 lu->FactPattern = SamePattern; 669 ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[0],&indx,&flg);CHKERRQ(ierr); 670 if (flg) { 671 switch (indx) { 672 case 0: 673 lu->FactPattern = SamePattern; 674 break; 675 case 1: 676 lu->FactPattern = SamePattern_SameRowPerm; 677 break; 678 } 679 } 680 681 options.IterRefine = NOREFINE; 682 ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);CHKERRQ(ierr); 683 if (set) { 684 if (flg) options.IterRefine = SLU_DOUBLE; 685 else options.IterRefine = NOREFINE; 686 } 687 688 if (PetscLogPrintInfo) options.PrintStat = YES; 689 else options.PrintStat = NO; 690 ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);CHKERRQ(ierr); 691 ierr = PetscOptionsEnd();CHKERRQ(ierr); 692 693 lu->options = options; 694 lu->options.Fact = DOFACT; 695 lu->matsolve_iscalled = PETSC_FALSE; 696 lu->matmatsolve_iscalled = PETSC_FALSE; 697 698 *F = B; 699 PetscFunctionReturn(0); 700 } 701 702 #undef __FUNCT__ 703 #define __FUNCT__ "MatSolverPackageRegister_SuperLU_DIST" 704 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void) 705 { 706 PetscErrorCode ierr; 707 PetscFunctionBegin; 708 ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ, MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr); 709 ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ, MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr); 710 PetscFunctionReturn(0); 711 } 712 713 /*MC 714 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 715 716 Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST 717 718 Use -pc_type lu -pc_factor_mat_solver_package superlu_dist to us this direct solver 719 720 Works with AIJ matrices 721 722 Options Database Keys: 723 + -mat_superlu_dist_r <n> - number of rows in processor partition 724 . -mat_superlu_dist_c <n> - number of columns in processor partition 725 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 726 . -mat_superlu_dist_equil - equilibrate the matrix 727 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 728 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 729 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 730 . -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm 731 . -mat_superlu_dist_iterrefine - use iterative refinement 732 - -mat_superlu_dist_statprint - print factorization information 733 734 Level: beginner 735 736 .seealso: PCLU 737 738 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 739 740 M*/ 741 742