1 2 /* 3 Provides an interface to the SuperLU_DIST sparse solver 4 */ 5 6 #include <../src/mat/impls/aij/seq/aij.h> 7 #include <../src/mat/impls/aij/mpi/mpiaij.h> 8 #if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */ 9 #include <stdlib.h> 10 #endif 11 12 #if defined(PETSC_USE_64BIT_INDICES) 13 /* ugly SuperLU_Dist variable telling it to use long long int */ 14 #define _LONGINT 15 #endif 16 17 EXTERN_C_BEGIN 18 #if defined(PETSC_USE_COMPLEX) 19 #include <superlu_zdefs.h> 20 #else 21 #include <superlu_ddefs.h> 22 #endif 23 EXTERN_C_END 24 25 /* 26 GLOBAL - The sparse matrix and right hand side are all stored initially on process 0. Should be called centralized 27 DISTRIBUTED - The sparse matrix and right hand size are initially stored across the entire MPI communicator. 28 */ 29 typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode; 30 const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0}; 31 32 typedef struct { 33 int_t nprow,npcol,*row,*col; 34 gridinfo_t grid; 35 superlu_dist_options_t options; 36 SuperMatrix A_sup; 37 ScalePermstruct_t ScalePermstruct; 38 LUstruct_t LUstruct; 39 int StatPrint; 40 SuperLU_MatInputMode MatInputMode; 41 SOLVEstruct_t SOLVEstruct; 42 fact_t FactPattern; 43 MPI_Comm comm_superlu; 44 #if defined(PETSC_USE_COMPLEX) 45 doublecomplex *val; 46 #else 47 double *val; 48 #endif 49 PetscBool matsolve_iscalled,matmatsolve_iscalled; 50 PetscBool CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */ 51 } Mat_SuperLU_DIST; 52 53 54 #undef __FUNCT__ 55 #define __FUNCT__ "MatSuperluDistGetDiagU_SuperLU_DIST" 56 PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU) 57 { 58 Mat_SuperLU_DIST *lu= (Mat_SuperLU_DIST*)F->data; 59 60 PetscFunctionBegin; 61 #if defined(PETSC_USE_COMPLEX) 62 PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU)); 63 #else 64 PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU)); 65 #endif 66 PetscFunctionReturn(0); 67 } 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "MatSuperluDistGetDiagU" 71 PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU) 72 { 73 PetscErrorCode ierr; 74 75 PetscFunctionBegin; 76 PetscValidHeaderSpecific(F,MAT_CLASSID,1); 77 ierr = PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));CHKERRQ(ierr); 78 PetscFunctionReturn(0); 79 } 80 81 #undef __FUNCT__ 82 #define __FUNCT__ "MatDestroy_SuperLU_DIST" 83 static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A) 84 { 85 PetscErrorCode ierr; 86 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data; 87 88 PetscFunctionBegin; 89 if (lu->CleanUpSuperLU_Dist) { 90 /* Deallocate SuperLU_DIST storage */ 91 if (lu->MatInputMode == GLOBAL) { 92 PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup)); 93 } else { 94 PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup)); 95 if (lu->options.SolveInitialized) { 96 #if defined(PETSC_USE_COMPLEX) 97 PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct)); 98 #else 99 PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct)); 100 #endif 101 } 102 } 103 PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct)); 104 PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct)); 105 PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct)); 106 107 /* Release the SuperLU_DIST process grid. */ 108 PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid)); 109 ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr); 110 } 111 ierr = PetscFree(A->data);CHKERRQ(ierr); 112 /* clear composed functions */ 113 ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr); 114 ierr = PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);CHKERRQ(ierr); 115 116 PetscFunctionReturn(0); 117 } 118 119 #undef __FUNCT__ 120 #define __FUNCT__ "MatSolve_SuperLU_DIST" 121 static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x) 122 { 123 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data; 124 PetscErrorCode ierr; 125 PetscMPIInt size; 126 PetscInt m=A->rmap->n,M=A->rmap->N,N=A->cmap->N; 127 SuperLUStat_t stat; 128 double berr[1]; 129 PetscScalar *bptr; 130 PetscInt nrhs=1; 131 Vec x_seq; 132 IS iden; 133 VecScatter scat; 134 int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */ 135 static PetscBool cite = PETSC_FALSE; 136 137 PetscFunctionBegin; 138 ierr = PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);CHKERRQ(ierr); 139 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 140 if (size > 1 && lu->MatInputMode == GLOBAL) { 141 /* global mat input, convert b to x_seq */ 142 ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr); 143 ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr); 144 ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr); 145 ierr = ISDestroy(&iden);CHKERRQ(ierr); 146 147 ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 148 ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 149 ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr); 150 } else { /* size==1 || distributed mat input */ 151 if (lu->options.SolveInitialized && !lu->matsolve_iscalled) { 152 /* see comments in MatMatSolve() */ 153 #if defined(PETSC_USE_COMPLEX) 154 PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct)); 155 #else 156 PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct)); 157 #endif 158 lu->options.SolveInitialized = NO; 159 } 160 ierr = VecCopy(b_mpi,x);CHKERRQ(ierr); 161 ierr = VecGetArray(x,&bptr);CHKERRQ(ierr); 162 } 163 164 if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact mush equal FACTORED"); 165 166 PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */ 167 if (lu->MatInputMode == GLOBAL) { 168 #if defined(PETSC_USE_COMPLEX) 169 PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info)); 170 #else 171 PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info)); 172 #endif 173 } else { /* distributed mat input */ 174 #if defined(PETSC_USE_COMPLEX) 175 PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 176 #else 177 PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 178 #endif 179 } 180 if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info); 181 182 if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 183 PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat)); 184 185 if (size > 1 && lu->MatInputMode == GLOBAL) { 186 /* convert seq x to mpi x */ 187 ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr); 188 ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 189 ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 190 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 191 ierr = VecDestroy(&x_seq);CHKERRQ(ierr); 192 } else { 193 ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr); 194 195 lu->matsolve_iscalled = PETSC_TRUE; 196 lu->matmatsolve_iscalled = PETSC_FALSE; 197 } 198 PetscFunctionReturn(0); 199 } 200 201 #undef __FUNCT__ 202 #define __FUNCT__ "MatMatSolve_SuperLU_DIST" 203 static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X) 204 { 205 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data; 206 PetscErrorCode ierr; 207 PetscMPIInt size; 208 PetscInt M=A->rmap->N,m=A->rmap->n,nrhs; 209 SuperLUStat_t stat; 210 double berr[1]; 211 PetscScalar *bptr; 212 int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */ 213 PetscBool flg; 214 215 PetscFunctionBegin; 216 if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact mush equal FACTORED"); 217 ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 218 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix"); 219 ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr); 220 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix"); 221 222 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 223 if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size); 224 /* size==1 or distributed mat input */ 225 if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) { 226 /* communication pattern of SOLVEstruct is unlikely created for matmatsolve, 227 thus destroy it and create a new SOLVEstruct. 228 Otherwise it may result in memory corruption or incorrect solution 229 See src/mat/examples/tests/ex125.c */ 230 #if defined(PETSC_USE_COMPLEX) 231 PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct)); 232 #else 233 PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct)); 234 #endif 235 lu->options.SolveInitialized = NO; 236 } 237 ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 238 239 ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr); 240 241 PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */ 242 ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr); 243 if (lu->MatInputMode == GLOBAL) { /* size == 1 */ 244 #if defined(PETSC_USE_COMPLEX) 245 PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info)); 246 #else 247 PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info)); 248 #endif 249 } else { /* distributed mat input */ 250 #if defined(PETSC_USE_COMPLEX) 251 PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 252 #else 253 PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info)); 254 #endif 255 } 256 if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info); 257 ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr); 258 259 if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 260 PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat)); 261 lu->matsolve_iscalled = PETSC_FALSE; 262 lu->matmatsolve_iscalled = PETSC_TRUE; 263 PetscFunctionReturn(0); 264 } 265 266 267 #undef __FUNCT__ 268 #define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST" 269 static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info) 270 { 271 Mat *tseq,A_seq = NULL; 272 Mat_SeqAIJ *aa,*bb; 273 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->data; 274 PetscErrorCode ierr; 275 PetscInt M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray, 276 m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj; 277 int sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */ 278 PetscMPIInt size; 279 SuperLUStat_t stat; 280 double *berr=0; 281 IS isrow; 282 #if defined(PETSC_USE_COMPLEX) 283 doublecomplex *av, *bv; 284 #else 285 double *av, *bv; 286 #endif 287 288 PetscFunctionBegin; 289 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 290 291 if (lu->MatInputMode == GLOBAL) { /* global mat input */ 292 if (size > 1) { /* convert mpi A to seq mat A */ 293 ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr); 294 ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr); 295 ierr = ISDestroy(&isrow);CHKERRQ(ierr); 296 297 A_seq = *tseq; 298 ierr = PetscFree(tseq);CHKERRQ(ierr); 299 aa = (Mat_SeqAIJ*)A_seq->data; 300 } else { 301 PetscBool flg; 302 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 303 if (flg) { 304 Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data; 305 A = At->A; 306 } 307 aa = (Mat_SeqAIJ*)A->data; 308 } 309 310 /* Convert Petsc NR matrix to SuperLU_DIST NC. 311 Note: memories of lu->val, col and row are allocated by CompRow_to_CompCol_dist()! */ 312 if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */ 313 PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup)); 314 if (lu->FactPattern == SamePattern_SameRowPerm) { 315 lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ 316 } else { /* lu->FactPattern == SamePattern */ 317 PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); 318 lu->options.Fact = SamePattern; 319 } 320 } 321 #if defined(PETSC_USE_COMPLEX) 322 PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row)); 323 #else 324 PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row)); 325 #endif 326 327 /* Create compressed column matrix A_sup. */ 328 #if defined(PETSC_USE_COMPLEX) 329 PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE)); 330 #else 331 PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE)); 332 #endif 333 } else { /* distributed mat input */ 334 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 335 aa=(Mat_SeqAIJ*)(mat->A)->data; 336 bb=(Mat_SeqAIJ*)(mat->B)->data; 337 ai=aa->i; aj=aa->j; 338 bi=bb->i; bj=bb->j; 339 #if defined(PETSC_USE_COMPLEX) 340 av=(doublecomplex*)aa->a; 341 bv=(doublecomplex*)bb->a; 342 #else 343 av=aa->a; 344 bv=bb->a; 345 #endif 346 rstart = A->rmap->rstart; 347 nz = aa->nz + bb->nz; 348 garray = mat->garray; 349 350 if (lu->options.Fact == DOFACT) { /* first numeric factorization */ 351 #if defined(PETSC_USE_COMPLEX) 352 PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row)); 353 #else 354 PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row)); 355 #endif 356 } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */ 357 SUPERLU_FREE((void*)lu->A_sup.Store); 358 if (lu->FactPattern == SamePattern_SameRowPerm) { 359 lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ 360 } else { 361 PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */ 362 lu->options.Fact = SamePattern; 363 } 364 } 365 nz = 0; 366 for (i=0; i<m; i++) { 367 lu->row[i] = nz; 368 countA = ai[i+1] - ai[i]; 369 countB = bi[i+1] - bi[i]; 370 if (aj) { 371 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 372 } else { 373 ajj = NULL; 374 } 375 bjj = bj + bi[i]; 376 377 /* B part, smaller col index */ 378 if (aj) { 379 colA_start = rstart + ajj[0]; /* the smallest global col index of A */ 380 } else { /* superlu_dist does not require matrix has diagonal entries, thus aj=NULL would work */ 381 colA_start = rstart; 382 } 383 jB = 0; 384 for (j=0; j<countB; j++) { 385 jcol = garray[bjj[j]]; 386 if (jcol > colA_start) { 387 jB = j; 388 break; 389 } 390 lu->col[nz] = jcol; 391 lu->val[nz++] = *bv++; 392 if (j==countB-1) jB = countB; 393 } 394 395 /* A part */ 396 for (j=0; j<countA; j++) { 397 lu->col[nz] = rstart + ajj[j]; 398 lu->val[nz++] = *av++; 399 } 400 401 /* B part, larger col index */ 402 for (j=jB; j<countB; j++) { 403 lu->col[nz] = garray[bjj[j]]; 404 lu->val[nz++] = *bv++; 405 } 406 } 407 lu->row[m] = nz; 408 #if defined(PETSC_USE_COMPLEX) 409 PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE)); 410 #else 411 PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE)); 412 #endif 413 } 414 415 /* Factor the matrix. */ 416 PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */ 417 if (lu->MatInputMode == GLOBAL) { /* global mat input */ 418 #if defined(PETSC_USE_COMPLEX) 419 PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo)); 420 #else 421 PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo)); 422 #endif 423 } else { /* distributed mat input */ 424 #if defined(PETSC_USE_COMPLEX) 425 PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); 426 #else 427 PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); 428 #endif 429 } 430 431 if (sinfo > 0) { 432 if (A->erroriffailure) { 433 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo); 434 } else { 435 if (sinfo <= lu->A_sup.ncol) { 436 F->errortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 437 ierr = PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);CHKERRQ(ierr); 438 } else if (sinfo > lu->A_sup.ncol) { 439 /* 440 number of bytes allocated when memory allocation 441 failure occurred, plus A->ncol. 442 */ 443 F->errortype = MAT_FACTOR_OUTMEMORY; 444 ierr = PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);CHKERRQ(ierr); 445 } 446 } 447 } else if (sinfo < 0) { 448 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo); 449 } 450 451 if (lu->MatInputMode == GLOBAL && size > 1) { 452 ierr = MatDestroy(&A_seq);CHKERRQ(ierr); 453 } 454 455 if (lu->options.PrintStat) { 456 PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 457 } 458 PStatFree(&stat); 459 (F)->assembled = PETSC_TRUE; 460 (F)->preallocated = PETSC_TRUE; 461 lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */ 462 PetscFunctionReturn(0); 463 } 464 465 /* Note the Petsc r and c permutations are ignored */ 466 #undef __FUNCT__ 467 #define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST" 468 static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 469 { 470 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data; 471 PetscInt M = A->rmap->N,N=A->cmap->N; 472 473 PetscFunctionBegin; 474 /* Initialize the SuperLU process grid. */ 475 PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid)); 476 477 /* Initialize ScalePermstruct and LUstruct. */ 478 PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct)); 479 PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct)); 480 F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST; 481 F->ops->solve = MatSolve_SuperLU_DIST; 482 F->ops->matsolve = MatMatSolve_SuperLU_DIST; 483 lu->CleanUpSuperLU_Dist = PETSC_TRUE; 484 PetscFunctionReturn(0); 485 } 486 487 #undef __FUNCT__ 488 #define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist" 489 static PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type) 490 { 491 PetscFunctionBegin; 492 *type = MATSOLVERSUPERLU_DIST; 493 PetscFunctionReturn(0); 494 } 495 496 #undef __FUNCT__ 497 #define __FUNCT__ "MatFactorInfo_SuperLU_DIST" 498 static PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer) 499 { 500 Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)A->data; 501 superlu_dist_options_t options; 502 PetscErrorCode ierr; 503 504 PetscFunctionBegin; 505 /* check if matrix is superlu_dist type */ 506 if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0); 507 508 options = lu->options; 509 ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr); 510 ierr = PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr); 511 ierr = PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr); 512 ierr = PetscViewerASCIIPrintf(viewer," Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr); 513 ierr = PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr); 514 ierr = PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr); 515 ierr = PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr); 516 ierr = PetscViewerASCIIPrintf(viewer," Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr); 517 518 switch (options.ColPerm) { 519 case NATURAL: 520 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 521 break; 522 case MMD_AT_PLUS_A: 523 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 524 break; 525 case MMD_ATA: 526 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 527 break; 528 case METIS_AT_PLUS_A: 529 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 530 break; 531 case PARMETIS: 532 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 533 break; 534 default: 535 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 536 } 537 538 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 539 540 if (lu->FactPattern == SamePattern) { 541 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 542 } else { 543 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 544 } 545 PetscFunctionReturn(0); 546 } 547 548 #undef __FUNCT__ 549 #define __FUNCT__ "MatView_SuperLU_DIST" 550 static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 551 { 552 PetscErrorCode ierr; 553 PetscBool iascii; 554 PetscViewerFormat format; 555 556 PetscFunctionBegin; 557 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 558 if (iascii) { 559 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 560 if (format == PETSC_VIEWER_ASCII_INFO) { 561 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 562 } 563 } 564 PetscFunctionReturn(0); 565 } 566 567 #undef __FUNCT__ 568 #define __FUNCT__ "MatGetFactor_aij_superlu_dist" 569 static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F) 570 { 571 Mat B; 572 Mat_SuperLU_DIST *lu; 573 PetscErrorCode ierr; 574 PetscInt M=A->rmap->N,N=A->cmap->N,indx; 575 PetscMPIInt size; 576 superlu_dist_options_t options; 577 PetscBool flg; 578 const char *colperm[] = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"}; 579 const char *rowperm[] = {"LargeDiag","NATURAL"}; 580 const char *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"}; 581 PetscBool set; 582 583 PetscFunctionBegin; 584 /* Create the factorization matrix */ 585 ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 586 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr); 587 ierr = PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);CHKERRQ(ierr); 588 ierr = MatSetUp(B);CHKERRQ(ierr); 589 B->ops->getinfo = MatGetInfo_External; 590 B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST; 591 B->ops->view = MatView_SuperLU_DIST; 592 B->ops->destroy = MatDestroy_SuperLU_DIST; 593 594 B->factortype = MAT_FACTOR_LU; 595 596 /* set solvertype */ 597 ierr = PetscFree(B->solvertype);CHKERRQ(ierr); 598 ierr = PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);CHKERRQ(ierr); 599 600 ierr = PetscNewLog(B,&lu);CHKERRQ(ierr); 601 B->data = lu; 602 603 /* Set the default input options: 604 options.Fact = DOFACT; 605 options.Equil = YES; 606 options.ParSymbFact = NO; 607 options.ColPerm = METIS_AT_PLUS_A; 608 options.RowPerm = LargeDiag; 609 options.ReplaceTinyPivot = YES; 610 options.IterRefine = DOUBLE; 611 options.Trans = NOTRANS; 612 options.SolveInitialized = NO; -hold the communication pattern used MatSolve() and MatMatSolve() 613 options.RefineInitialized = NO; 614 options.PrintStat = YES; 615 */ 616 set_default_options_dist(&options); 617 618 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr); 619 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 620 /* Default num of process columns and rows */ 621 lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size)); 622 if (!lu->npcol) lu->npcol = 1; 623 while (lu->npcol > 0) { 624 lu->nprow = (int_t) (size/lu->npcol); 625 if (size == lu->nprow * lu->npcol) break; 626 lu->npcol--; 627 } 628 629 ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr); 630 ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);CHKERRQ(ierr); 631 ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);CHKERRQ(ierr); 632 if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal to nprow %d * npcol %d",size,lu->nprow,lu->npcol); 633 634 lu->MatInputMode = DISTRIBUTED; 635 636 ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr); 637 if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL; 638 639 ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr); 640 if (set && !flg) options.Equil = NO; 641 642 ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr); 643 if (flg) { 644 switch (indx) { 645 case 0: 646 options.RowPerm = LargeDiag; 647 break; 648 case 1: 649 options.RowPerm = NOROWPERM; 650 break; 651 } 652 } 653 654 ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr); 655 if (flg) { 656 switch (indx) { 657 case 0: 658 options.ColPerm = NATURAL; 659 break; 660 case 1: 661 options.ColPerm = MMD_AT_PLUS_A; 662 break; 663 case 2: 664 options.ColPerm = MMD_ATA; 665 break; 666 case 3: 667 options.ColPerm = METIS_AT_PLUS_A; 668 break; 669 case 4: 670 options.ColPerm = PARMETIS; /* only works for np>1 */ 671 break; 672 default: 673 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 674 } 675 } 676 677 options.ReplaceTinyPivot = NO; 678 ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr); 679 if (set && flg) options.ReplaceTinyPivot = YES; 680 681 options.ParSymbFact = NO; 682 ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);CHKERRQ(ierr); 683 if (set && flg && size>1) { 684 if (lu->MatInputMode == GLOBAL) { 685 #if defined(PETSC_USE_INFO) 686 ierr = PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");CHKERRQ(ierr); 687 #endif 688 } else { 689 #if defined(PETSC_HAVE_PARMETIS) 690 options.ParSymbFact = YES; 691 options.ColPerm = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */ 692 #else 693 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS"); 694 #endif 695 } 696 } 697 698 lu->FactPattern = SamePattern; 699 ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[0],&indx,&flg);CHKERRQ(ierr); 700 if (flg) { 701 switch (indx) { 702 case 0: 703 lu->FactPattern = SamePattern; 704 break; 705 case 1: 706 lu->FactPattern = SamePattern_SameRowPerm; 707 break; 708 } 709 } 710 711 options.IterRefine = NOREFINE; 712 ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);CHKERRQ(ierr); 713 if (set) { 714 if (flg) options.IterRefine = SLU_DOUBLE; 715 else options.IterRefine = NOREFINE; 716 } 717 718 if (PetscLogPrintInfo) options.PrintStat = YES; 719 else options.PrintStat = NO; 720 ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);CHKERRQ(ierr); 721 ierr = PetscOptionsEnd();CHKERRQ(ierr); 722 723 lu->options = options; 724 lu->options.Fact = DOFACT; 725 lu->matsolve_iscalled = PETSC_FALSE; 726 lu->matmatsolve_iscalled = PETSC_FALSE; 727 728 ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr); 729 ierr = PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);CHKERRQ(ierr); 730 731 *F = B; 732 PetscFunctionReturn(0); 733 } 734 735 #undef __FUNCT__ 736 #define __FUNCT__ "MatSolverPackageRegister_SuperLU_DIST" 737 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void) 738 { 739 PetscErrorCode ierr; 740 PetscFunctionBegin; 741 ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ, MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr); 742 ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ, MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr); 743 PetscFunctionReturn(0); 744 } 745 746 /*MC 747 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 748 749 Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST 750 751 Use -pc_type lu -pc_factor_mat_solver_package superlu_dist to us this direct solver 752 753 Works with AIJ matrices 754 755 Options Database Keys: 756 + -mat_superlu_dist_r <n> - number of rows in processor partition 757 . -mat_superlu_dist_c <n> - number of columns in processor partition 758 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 759 . -mat_superlu_dist_equil - equilibrate the matrix 760 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 761 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 762 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 763 . -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm 764 . -mat_superlu_dist_iterrefine - use iterative refinement 765 - -mat_superlu_dist_statprint - print factorization information 766 767 Level: beginner 768 769 .seealso: PCLU 770 771 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 772 773 M*/ 774 775