/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL      - The sparse matrix and right-hand side are initially stored entirely on process 0 (a better name would be "centralized").
    DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
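/*
   Illustrative example (added commentary, not part of the original source): the input
   mode is normally selected from the options database using the enum strings above, e.g.

     -mat_superlu_dist_matinput DISTRIBUTED

   DISTRIBUTED is the default when more than one process is used; see
   MatGetFactor_aij_superlu_dist() below.
*/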
typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex        *val;
#else
  double               *val;
#endif
  PetscBool            matsolve_iscalled,matmatsolve_iscalled;
  PetscBool            CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info;  /* the SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size == 1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see the comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));  /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
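/*
   Illustrative usage sketch (added commentary, not part of the original source; the
   objects A, b, x are assumed to exist): solving a single system through the
   low-level factor interface that ends up in MatSolve_SuperLU_DIST() above.

     Mat           F;
     MatFactorInfo finfo;
     ierr = MatFactorInfoInitialize(&finfo);CHKERRQ(ierr);
     ierr = MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,NULL,NULL,&finfo);CHKERRQ(ierr);   the r and c orderings are ignored by this package
     ierr = MatLUFactorNumeric(F,A,&finfo);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
     ierr = MatDestroy(&F);CHKERRQ(ierr);
*/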
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info;  /* the SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size == 1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* the communication pattern in SOLVEstruct was likely created for a single right-hand
       side, not for MatMatSolve; destroy it and create a new SOLVEstruct, otherwise the
       solve may produce memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));  /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
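/*
   Illustrative usage sketch for MatMatSolve_SuperLU_DIST() above (added commentary,
   not part of the original source; F is a factored matrix obtained as in the sketch
   after MatSolve_SuperLU_DIST(), and comm, m, M, nrhs are assumed to be defined):

     Mat B,X;
     ierr = MatCreateDense(comm,m,PETSC_DECIDE,M,nrhs,NULL,&B);CHKERRQ(ierr);
     ierr = MatCreateDense(comm,m,PETSC_DECIDE,M,nrhs,NULL,&X);CHKERRQ(ierr);
     ... fill B with the right-hand sides ...
     ierr = MatMatSolve(F,B,X);CHKERRQ(ierr);
*/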
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;  /* the SuperLU_Dist info flag is always an int, even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  PetscLogDouble   time0,time,time_min,time_max;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect the time spent on matrix conversion */
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row matrix to SuperLU_DIST compressed-column format.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, the sparsity pattern is reused */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* the matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row));
#endif

    /* Create the compressed-column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization; the sparsity pattern and perm_c are reused */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to a crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* the matrix has similar numerical values */
      } else {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* deallocate storage associated with the L and U matrices */
        lu->options.Fact = SamePattern;
      }
    }
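    /*
       Note (added commentary, not in the original source): with SamePattern_SameRowPerm
       SuperLU_DIST reuses the row permutation and the existing L/U data structures, so
       only the numerical values are refactored; with SamePattern the row permutation is
       recomputed, which is why Destroy_LU() must be called first. The pattern is chosen
       with -mat_superlu_dist_fact below.
    */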
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col indices */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col indices */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscGetTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));  /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    time = time/size; /* average time */
    ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"  Mat conversion (PETSc->SuperLU_DIST) time (max/min/avg): \n    %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied; a local option used only by this function */
  PetscFunctionReturn(0);
}
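/*
   Worked example of the row merge in MatLUFactorNumeric_SuperLU_DIST() above (added
   commentary, not part of the original source). Suppose a process owns global row 2
   (rstart = 2) with entries in global columns 0, 2, 3, 4, where columns 2-3 live in
   the diagonal block mat->A and columns 0 and 4 live in the off-diagonal block mat->B
   (whose compact column indices are mapped to global columns by garray[]). Then
   colA_start = rstart + ajj[0] = 2, and jB marks the first B entry with global column
   greater than colA_start, so the row is emitted in ascending global column order:
   B(col 0), A(col 2), A(col 3), B(col 4).
*/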
/* Note: the PETSc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(M, N, &lu->LUstruct));

  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_aij_superlu_dist",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);

  B->factortype = MAT_FACTOR_LU;

  ierr     = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
       options.Fact              = DOFACT;
       options.Equil             = YES;
       options.ParSymbFact       = NO;
       options.ColPerm           = METIS_AT_PLUS_A;
       options.RowPerm           = LargeDiag;
       options.ReplaceTinyPivot  = YES;
       options.IterRefine        = DOUBLE;
       options.Trans             = NOTRANS;
       options.SolveInitialized  = NO;  - holds the communication pattern used by MatSolve() and MatMatSolve()
       options.RefineInitialized = NO;
       options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Default number of process rows and columns: pick the most nearly square grid that exactly factors size */
  ierr = PetscMPIIntCast((PetscInt)(0.5 + PetscSqrtReal((PetscReal)size)),&lu->npcol);CHKERRQ(ierr);
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    ierr = PetscMPIIntCast(size/lu->npcol,&lu->nprow);CHKERRQ(ierr);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }

  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,&lu->nprow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,&lu->npcol,NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);
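  /*
     Worked example of the default grid selection above (added commentary, not part of
     the original source): for size = 6, npcol starts at (int)(0.5 + sqrt(6.0)) = 2 and
     6 == 3*2, giving a 3 x 2 grid; for a prime size such as 7 the loop falls through
     to the degenerate 7 x 1 grid.
  */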
  lu->MatInputMode = DISTRIBUTED;

  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) options.ReplaceTinyPivot = NO;

  options.ParSymbFact = NO;

  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of the user's ordering setting */
#else
    ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"parsymbfact needs PARMETIS\n");CHKERRQ(ierr);
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;

  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) options.IterRefine = SLU_DOUBLE;

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu = (Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if the matrix is of superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;

  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);
"NATURAL" : "LargeDiag");CHKERRQ(ierr); 677 678 switch (options.ColPerm) { 679 case NATURAL: 680 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 681 break; 682 case MMD_AT_PLUS_A: 683 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 684 break; 685 case MMD_ATA: 686 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 687 break; 688 case METIS_AT_PLUS_A: 689 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 690 break; 691 case PARMETIS: 692 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 693 break; 694 default: 695 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 696 } 697 698 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 699 700 if (lu->FactPattern == SamePattern) { 701 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 702 } else { 703 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 704 } 705 PetscFunctionReturn(0); 706 } 707 708 #undef __FUNCT__ 709 #define __FUNCT__ "MatView_SuperLU_DIST" 710 PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 711 { 712 PetscErrorCode ierr; 713 PetscBool iascii; 714 PetscViewerFormat format; 715 716 PetscFunctionBegin; 717 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 718 if (iascii) { 719 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 720 if (format == PETSC_VIEWER_ASCII_INFO) { 721 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 722 } 723 } 724 PetscFunctionReturn(0); 725 } 726 727 728 /*MC 729 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 730 731 Works with AIJ matrices 732 733 Options Database Keys: 734 + -mat_superlu_dist_r <n> - number of rows in processor partition 735 . -mat_superlu_dist_c <n> - number of columns in processor partition 736 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 737 . -mat_superlu_dist_equil - equilibrate the matrix 738 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 739 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 740 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 741 . -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm 742 . -mat_superlu_dist_iterrefine - use iterative refinement 743 - -mat_superlu_dist_statprint - print factorization information 744 745 Level: beginner 746 747 .seealso: PCLU 748 749 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 750 751 M*/ 752 753