/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};

typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex        *val;
#else
  double               *val;
#endif
  PetscBool            matsolve_iscalled,matmatsolve_iscalled;
  PetscBool            CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo *);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo *);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
      dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#else
    pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern stored in SOLVEstruct was unlikely to have been created
       for MatMatSolve(), thus destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
    dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,PETSC_NULL,&nrhs);CHKERRQ(ierr);

  PStatInit(&stat); /* Initialize the statistics variables. */
  ierr = MatGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,
                     &lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs,
                     &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);

  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = PETSC_NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size,rank;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  PetscLogDouble   time0,time,time_min,time_max;
  Mat              F_diag=PETSC_NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect time for mat conversion */
    ierr = MPI_Barrier(((PetscObject)A)->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        Destroy_LU(N, &lu->grid, &lu->LUstruct);
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row);
#else
    dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row);
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE);
#else
    dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE);
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#else
      dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        Destroy_LU(N, &lu->grid, &lu->LUstruct); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }

    /* Merge the diagonal (A) and off-diagonal (B) parts of the MPIAIJ matrix into a single
       local compressed-row structure with global column indices, keeping each row's column
       indices ascending: B entries left of the diagonal block, then the A block, then the
       remaining B entries. */
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
                                   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE);
#else
    dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
                                   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE);
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscGetTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                     &lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                     &lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,
            &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,
            &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    if (size > 1) {
      ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,((PetscObject)A)->comm);CHKERRQ(ierr);

      time = time/size; /* average time */
      if (!rank) {
        ierr = PetscPrintf(PETSC_COMM_SELF, "  Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n    %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscPrintf(PETSC_COMM_SELF, "  Mat conversion(PETSc->SuperLU_DIST) time: \n    %g\n",time0);CHKERRQ(ierr);
    }
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied; local option used by this function only. */
  PetscFunctionReturn(0);
}

/* Note: the PETSc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M=A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);

  /* Initialize ScalePermstruct and LUstruct. */
  ScalePermstructInit(M, N, &lu->ScalePermstruct);
  LUstructInit(M, N, &lu->LUstruct);

  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_aij_superlu_dist",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);
  B->factortype = MAT_FACTOR_LU;

  ierr     = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
       options.Fact              = DOFACT;
       options.Equil             = YES;
       options.ParSymbFact       = NO;
       options.ColPerm           = METIS_AT_PLUS_A;
       options.RowPerm           = LargeDiag;
       options.ReplaceTinyPivot  = YES;
       options.IterRefine        = DOUBLE;
       options.Trans             = NOTRANS;
       options.SolveInitialized  = NO; -- holds the communication pattern used by MatSolve() and MatMatSolve()
       options.RefineInitialized = NO;
       options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  /* Default number of process columns and rows */
  lu->npcol = PetscMPIIntCast((PetscInt)(0.5 + sqrt((PetscReal)size)));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = PetscMPIIntCast(size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }

  ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol)
    SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;
  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) {
    options.Equil = NO;
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) {
    options.ReplaceTinyPivot = NO;
  }

  options.ParSymbFact = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
#ifdef PETSC_HAVE_PARMETIS
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    ierr = PetscPrintf(((PetscObject)A)->comm,"Warning: parallel symbolic factorization requires ParMetis; option ignored\n");CHKERRQ(ierr);
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;
  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
    options.IterRefine = DOUBLE;
  }

  if (PetscLogPrintInfo) {
    options.PrintStat = YES;
  } else {
    options.PrintStat = NO;
  }
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",
                          (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu = (Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;

  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);
"NATURAL": "LargeDiag");CHKERRQ(ierr); 696 697 switch(options.ColPerm){ 698 case NATURAL: 699 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 700 break; 701 case MMD_AT_PLUS_A: 702 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 703 break; 704 case MMD_ATA: 705 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 706 break; 707 case METIS_AT_PLUS_A: 708 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 709 break; 710 case PARMETIS: 711 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 712 break; 713 default: 714 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 715 } 716 717 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 718 719 if (lu->FactPattern == SamePattern){ 720 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 721 } else { 722 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 723 } 724 PetscFunctionReturn(0); 725 } 726 727 #undef __FUNCT__ 728 #define __FUNCT__ "MatView_SuperLU_DIST" 729 PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 730 { 731 PetscErrorCode ierr; 732 PetscBool iascii; 733 PetscViewerFormat format; 734 735 PetscFunctionBegin; 736 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 737 if (iascii) { 738 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 739 if (format == PETSC_VIEWER_ASCII_INFO) { 740 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 741 } 742 } 743 PetscFunctionReturn(0); 744 } 745 746 747 /*MC 748 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 749 750 Works with AIJ matrices 751 752 Options Database Keys: 753 + -mat_superlu_dist_r <n> - number of rows in processor partition 754 . -mat_superlu_dist_c <n> - number of columns in processor partition 755 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 756 . -mat_superlu_dist_equil - equilibrate the matrix 757 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 758 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 759 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 760 . -mat_superlu_dist_fact <SamePattern> (choose one of) SamePattern SamePattern_SameRowPerm 761 . -mat_superlu_dist_iterrefine - use iterative refinement 762 - -mat_superlu_dist_statprint - print factorization information 763 764 Level: beginner 765 766 .seealso: PCLU 767 768 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 769 770 M*/ 771 772