1 2 /* 3 Provides an interface to the SuperLU_DIST_2.2 sparse solver 4 */ 5 6 #include <../src/mat/impls/aij/seq/aij.h> 7 #include <../src/mat/impls/aij/mpi/mpiaij.h> 8 #if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */ 9 #include <stdlib.h> 10 #endif 11 12 #if defined(PETSC_USE_64BIT_INDICES) 13 /* ugly SuperLU_Dist variable telling it to use long long int */ 14 #define _LONGINT 15 #endif 16 17 EXTERN_C_BEGIN 18 #if defined(PETSC_USE_COMPLEX) 19 #include <superlu_zdefs.h> 20 #else 21 #include <superlu_ddefs.h> 22 #endif 23 EXTERN_C_END 24 25 /* 26 GLOBAL - The sparse matrix and right hand side are all stored initially on process 0. Should be called centralized 27 DISTRIBUTED - The sparse matrix and right hand size are initially stored across the entire MPI communicator. 28 */ 29 typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode; 30 const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0}; 31 32 typedef struct { 33 int_t nprow,npcol,*row,*col; 34 gridinfo_t grid; 35 superlu_options_t options; 36 SuperMatrix A_sup; 37 ScalePermstruct_t ScalePermstruct; 38 LUstruct_t LUstruct; 39 int StatPrint; 40 SuperLU_MatInputMode MatInputMode; 41 SOLVEstruct_t SOLVEstruct; 42 fact_t FactPattern; 43 MPI_Comm comm_superlu; 44 #if defined(PETSC_USE_COMPLEX) 45 doublecomplex *val; 46 #else 47 double *val; 48 #endif 49 PetscBool matsolve_iscalled,matmatsolve_iscalled; 50 PetscBool CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */ 51 } Mat_SuperLU_DIST; 52 53 extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer); 54 extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo *); 55 extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat); 56 extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer); 57 extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec); 58 extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo *); 59 extern PetscErrorCode MatDestroy_MPIAIJ(Mat); 60 61 #undef __FUNCT__ 62 #define __FUNCT__ "MatDestroy_SuperLU_DIST" 63 PetscErrorCode MatDestroy_SuperLU_DIST(Mat A) 64 { 65 PetscErrorCode ierr; 66 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr; 67 PetscBool flg; 68 69 PetscFunctionBegin; 70 if (lu && lu->CleanUpSuperLU_Dist) { 71 /* Deallocate SuperLU_DIST storage */ 72 if (lu->MatInputMode == GLOBAL) { 73 Destroy_CompCol_Matrix_dist(&lu->A_sup); 74 } else { 75 Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); 76 if ( lu->options.SolveInitialized ) { 77 #if defined(PETSC_USE_COMPLEX) 78 zSolveFinalize(&lu->options, &lu->SOLVEstruct); 79 #else 80 dSolveFinalize(&lu->options, &lu->SOLVEstruct); 81 #endif 82 } 83 } 84 Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct); 85 ScalePermstructFree(&lu->ScalePermstruct); 86 LUstructFree(&lu->LUstruct); 87 88 /* Release the SuperLU_DIST process grid. */ 89 superlu_gridexit(&lu->grid); 90 91 ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr); 92 } 93 ierr = PetscFree(A->spptr);CHKERRQ(ierr); 94 95 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 96 if (flg) { 97 ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr); 98 } else { 99 ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 100 } 101 PetscFunctionReturn(0); 102 } 103 104 #undef __FUNCT__ 105 #define __FUNCT__ "MatSolve_SuperLU_DIST" 106 PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x) 107 { 108 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr; 109 PetscErrorCode ierr; 110 PetscMPIInt size; 111 PetscInt m=A->rmap->n,M=A->rmap->N,N=A->cmap->N; 112 SuperLUStat_t stat; 113 double berr[1]; 114 PetscScalar *bptr; 115 PetscInt nrhs=1; 116 Vec x_seq; 117 IS iden; 118 VecScatter scat; 119 int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */ 120 121 PetscFunctionBegin; 122 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 123 if (size > 1 && lu->MatInputMode == GLOBAL) { 124 /* global mat input, convert b to x_seq */ 125 ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr); 126 ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr); 127 ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr); 128 ierr = ISDestroy(&iden);CHKERRQ(ierr); 129 130 ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 131 ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 132 ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr); 133 } else { /* size==1 || distributed mat input */ 134 if (lu->options.SolveInitialized && !lu->matsolve_iscalled){ 135 /* see comments in MatMatSolve() */ 136 #if defined(PETSC_USE_COMPLEX) 137 zSolveFinalize(&lu->options, &lu->SOLVEstruct); 138 #else 139 dSolveFinalize(&lu->options, &lu->SOLVEstruct); 140 #endif 141 lu->options.SolveInitialized = NO; 142 } 143 ierr = VecCopy(b_mpi,x);CHKERRQ(ierr); 144 ierr = VecGetArray(x,&bptr);CHKERRQ(ierr); 145 } 146 147 if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact mush equal FACTORED"); 148 149 PStatInit(&stat); /* Initialize the statistics variables. */ 150 if (lu->MatInputMode == GLOBAL) { 151 #if defined(PETSC_USE_COMPLEX) 152 pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs, 153 &lu->grid,&lu->LUstruct,berr,&stat,&info); 154 #else 155 pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs, 156 &lu->grid,&lu->LUstruct,berr,&stat,&info); 157 #endif 158 } else { /* distributed mat input */ 159 #if defined(PETSC_USE_COMPLEX) 160 pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, 161 &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info); 162 #else 163 pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid, 164 &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info); 165 #endif 166 } 167 if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info); 168 169 if (lu->options.PrintStat) { 170 PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 171 } 172 PStatFree(&stat); 173 174 if (size > 1 && lu->MatInputMode == GLOBAL) { 175 /* convert seq x to mpi x */ 176 ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr); 177 ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 178 ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 179 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 180 ierr = VecDestroy(&x_seq);CHKERRQ(ierr); 181 } else { 182 ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr); 183 lu->matsolve_iscalled = PETSC_TRUE; 184 lu->matmatsolve_iscalled = PETSC_FALSE; 185 } 186 PetscFunctionReturn(0); 187 } 188 189 #undef __FUNCT__ 190 #define __FUNCT__ "MatMatSolve_SuperLU_DIST" 191 PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X) 192 { 193 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr; 194 PetscErrorCode ierr; 195 PetscMPIInt size; 196 PetscInt M=A->rmap->N,m=A->rmap->n,nrhs; 197 SuperLUStat_t stat; 198 double berr[1]; 199 PetscScalar *bptr; 200 int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */ 201 PetscBool flg; 202 203 PetscFunctionBegin; 204 if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact mush equal FACTORED"); 205 ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,PETSC_NULL);CHKERRQ(ierr); 206 if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix"); 207 ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,PETSC_NULL);CHKERRQ(ierr); 208 if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix"); 209 210 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 211 if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size); 212 /* size==1 or distributed mat input */ 213 if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled){ 214 /* communication pattern of SOLVEstruct is unlikely created for matmatsolve, 215 thus destroy it and create a new SOLVEstruct. 216 Otherwise it may result in memory corruption or incorrect solution 217 See src/mat/examples/tests/ex125.c */ 218 #if defined(PETSC_USE_COMPLEX) 219 zSolveFinalize(&lu->options, &lu->SOLVEstruct); 220 #else 221 dSolveFinalize(&lu->options, &lu->SOLVEstruct); 222 #endif 223 lu->options.SolveInitialized = NO; 224 } 225 ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 226 227 ierr = MatGetSize(B_mpi,PETSC_NULL,&nrhs);CHKERRQ(ierr); 228 229 PStatInit(&stat); /* Initialize the statistics variables. */ 230 ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr); 231 if (lu->MatInputMode == GLOBAL) { /* size == 1 */ 232 #if defined(PETSC_USE_COMPLEX) 233 pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs, 234 &lu->grid, &lu->LUstruct, berr, &stat, &info); 235 #else 236 pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, 237 &lu->grid, &lu->LUstruct, berr, &stat, &info); 238 #endif 239 } else { /* distributed mat input */ 240 #if defined(PETSC_USE_COMPLEX) 241 pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, 242 &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info); 243 #else 244 pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid, 245 &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info); 246 #endif 247 } 248 if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info); 249 ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr); 250 251 if (lu->options.PrintStat) { 252 PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 253 } 254 PStatFree(&stat); 255 lu->matsolve_iscalled = PETSC_FALSE; 256 lu->matmatsolve_iscalled = PETSC_TRUE; 257 PetscFunctionReturn(0); 258 } 259 260 261 #undef __FUNCT__ 262 #define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST" 263 PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info) 264 { 265 Mat *tseq,A_seq = PETSC_NULL; 266 Mat_SeqAIJ *aa,*bb; 267 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr; 268 PetscErrorCode ierr; 269 PetscInt M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray, 270 m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj; 271 int sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */ 272 PetscMPIInt size; 273 SuperLUStat_t stat; 274 double *berr=0; 275 IS isrow; 276 PetscLogDouble time0,time,time_min,time_max; 277 Mat F_diag=PETSC_NULL; 278 #if defined(PETSC_USE_COMPLEX) 279 doublecomplex *av, *bv; 280 #else 281 double *av, *bv; 282 #endif 283 284 PetscFunctionBegin; 285 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 286 287 if (lu->options.PrintStat) { /* collect time for mat conversion */ 288 ierr = MPI_Barrier(((PetscObject)A)->comm);CHKERRQ(ierr); 289 ierr = PetscGetTime(&time0);CHKERRQ(ierr); 290 } 291 292 if (lu->MatInputMode == GLOBAL) { /* global mat input */ 293 if (size > 1) { /* convert mpi A to seq mat A */ 294 ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr); 295 ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr); 296 ierr = ISDestroy(&isrow);CHKERRQ(ierr); 297 298 A_seq = *tseq; 299 ierr = PetscFree(tseq);CHKERRQ(ierr); 300 aa = (Mat_SeqAIJ*)A_seq->data; 301 } else { 302 PetscBool flg; 303 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 304 if (flg) { 305 Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data; 306 A = At->A; 307 } 308 aa = (Mat_SeqAIJ*)A->data; 309 } 310 311 /* Convert Petsc NR matrix to SuperLU_DIST NC. 312 Note: memories of lu->val, col and row are allocated by CompRow_to_CompCol_dist()! */ 313 if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */ 314 Destroy_CompCol_Matrix_dist(&lu->A_sup); 315 if (lu->FactPattern == SamePattern_SameRowPerm){ 316 lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ 317 } else { /* lu->FactPattern == SamePattern */ 318 Destroy_LU(N, &lu->grid, &lu->LUstruct); 319 lu->options.Fact = SamePattern; 320 } 321 } 322 #if defined(PETSC_USE_COMPLEX) 323 zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row); 324 #else 325 dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row); 326 #endif 327 328 /* Create compressed column matrix A_sup. */ 329 #if defined(PETSC_USE_COMPLEX) 330 zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE); 331 #else 332 dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE); 333 #endif 334 } else { /* distributed mat input */ 335 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 336 aa=(Mat_SeqAIJ*)(mat->A)->data; 337 bb=(Mat_SeqAIJ*)(mat->B)->data; 338 ai=aa->i; aj=aa->j; 339 bi=bb->i; bj=bb->j; 340 #if defined(PETSC_USE_COMPLEX) 341 av=(doublecomplex*)aa->a; 342 bv=(doublecomplex*)bb->a; 343 #else 344 av=aa->a; 345 bv=bb->a; 346 #endif 347 rstart = A->rmap->rstart; 348 nz = aa->nz + bb->nz; 349 garray = mat->garray; 350 351 if (lu->options.Fact == DOFACT) {/* first numeric factorization */ 352 #if defined(PETSC_USE_COMPLEX) 353 zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row); 354 #else 355 dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row); 356 #endif 357 } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */ 358 /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */ 359 if (lu->FactPattern == SamePattern_SameRowPerm){ 360 lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ 361 } else { 362 Destroy_LU(N, &lu->grid, &lu->LUstruct); /* Deallocate storage associated with the L and U matrices. */ 363 lu->options.Fact = SamePattern; 364 } 365 } 366 nz = 0; 367 for ( i=0; i<m; i++ ) { 368 lu->row[i] = nz; 369 countA = ai[i+1] - ai[i]; 370 countB = bi[i+1] - bi[i]; 371 ajj = aj + ai[i]; /* ptr to the beginning of this row */ 372 bjj = bj + bi[i]; 373 374 /* B part, smaller col index */ 375 colA_start = rstart + ajj[0]; /* the smallest global col index of A */ 376 jB = 0; 377 for (j=0; j<countB; j++){ 378 jcol = garray[bjj[j]]; 379 if (jcol > colA_start) { 380 jB = j; 381 break; 382 } 383 lu->col[nz] = jcol; 384 lu->val[nz++] = *bv++; 385 if (j==countB-1) jB = countB; 386 } 387 388 /* A part */ 389 for (j=0; j<countA; j++){ 390 lu->col[nz] = rstart + ajj[j]; 391 lu->val[nz++] = *av++; 392 } 393 394 /* B part, larger col index */ 395 for (j=jB; j<countB; j++){ 396 lu->col[nz] = garray[bjj[j]]; 397 lu->val[nz++] = *bv++; 398 } 399 } 400 lu->row[m] = nz; 401 #if defined(PETSC_USE_COMPLEX) 402 zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE); 403 #else 404 dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE); 405 #endif 406 } 407 if (lu->options.PrintStat) { 408 ierr = PetscGetTime(&time);CHKERRQ(ierr); 409 time0 = time - time0; 410 } 411 412 /* Factor the matrix. */ 413 PStatInit(&stat); /* Initialize the statistics variables. */ 414 if (lu->MatInputMode == GLOBAL) { /* global mat input */ 415 #if defined(PETSC_USE_COMPLEX) 416 pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo); 417 #else 418 pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo); 419 #endif 420 } else { /* distributed mat input */ 421 #if defined(PETSC_USE_COMPLEX) 422 pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo); 423 if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo); 424 #else 425 pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo); 426 if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo); 427 #endif 428 } 429 430 if (lu->MatInputMode == GLOBAL && size > 1){ 431 ierr = MatDestroy(&A_seq);CHKERRQ(ierr); 432 } 433 434 if (lu->options.PrintStat) { 435 ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,((PetscObject)A)->comm); 436 ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,((PetscObject)A)->comm); 437 ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,((PetscObject)A)->comm); 438 time = time/size; /* average time */ 439 ierr = PetscPrintf(((PetscObject)A)->comm, " Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr); 440 PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */ 441 } 442 PStatFree(&stat); 443 if (size > 1){ 444 F_diag = ((Mat_MPIAIJ *)(F)->data)->A; 445 F_diag->assembled = PETSC_TRUE; 446 } 447 (F)->assembled = PETSC_TRUE; 448 (F)->preallocated = PETSC_TRUE; 449 lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */ 450 PetscFunctionReturn(0); 451 } 452 453 /* Note the Petsc r and c permutations are ignored */ 454 #undef __FUNCT__ 455 #define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST" 456 PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 457 { 458 Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr; 459 PetscInt M=A->rmap->N,N=A->cmap->N; 460 461 PetscFunctionBegin; 462 /* Initialize the SuperLU process grid. */ 463 superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid); 464 465 /* Initialize ScalePermstruct and LUstruct. */ 466 ScalePermstructInit(M, N, &lu->ScalePermstruct); 467 LUstructInit(M, N, &lu->LUstruct); 468 F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST; 469 F->ops->solve = MatSolve_SuperLU_DIST; 470 F->ops->matsolve = MatMatSolve_SuperLU_DIST; 471 lu->CleanUpSuperLU_Dist = PETSC_TRUE; 472 PetscFunctionReturn(0); 473 } 474 475 EXTERN_C_BEGIN 476 #undef __FUNCT__ 477 #define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist" 478 PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type) 479 { 480 PetscFunctionBegin; 481 *type = MATSOLVERSUPERLU_DIST; 482 PetscFunctionReturn(0); 483 } 484 EXTERN_C_END 485 486 #undef __FUNCT__ 487 #define __FUNCT__ "MatGetFactor_aij_superlu_dist" 488 PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F) 489 { 490 Mat B; 491 Mat_SuperLU_DIST *lu; 492 PetscErrorCode ierr; 493 PetscInt M=A->rmap->N,N=A->cmap->N,indx; 494 PetscMPIInt size; 495 superlu_options_t options; 496 PetscBool flg; 497 const char *colperm[] = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"}; 498 const char *rowperm[] = {"LargeDiag","NATURAL"}; 499 const char *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"}; 500 501 PetscFunctionBegin; 502 /* Create the factorization matrix */ 503 ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr); 504 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr); 505 ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 506 ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL); 507 ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 508 509 B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST; 510 B->ops->view = MatView_SuperLU_DIST; 511 B->ops->destroy = MatDestroy_SuperLU_DIST; 512 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_aij_superlu_dist",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr); 513 B->factortype = MAT_FACTOR_LU; 514 515 ierr = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr); 516 B->spptr = lu; 517 518 /* Set the default input options: 519 options.Fact = DOFACT; 520 options.Equil = YES; 521 options.ParSymbFact = NO; 522 options.ColPerm = METIS_AT_PLUS_A; 523 options.RowPerm = LargeDiag; 524 options.ReplaceTinyPivot = YES; 525 options.IterRefine = DOUBLE; 526 options.Trans = NOTRANS; 527 options.SolveInitialized = NO; -hold the communication pattern used MatSolve() and MatMatSolve() 528 options.RefineInitialized = NO; 529 options.PrintStat = YES; 530 */ 531 set_default_options_dist(&options); 532 533 ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_superlu));CHKERRQ(ierr); 534 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 535 /* Default num of process columns and rows */ 536 lu->npcol = PetscMPIIntCast((PetscInt)(0.5 + PetscSqrtReal((PetscReal)size))); 537 if (!lu->npcol) lu->npcol = 1; 538 while (lu->npcol > 0) { 539 lu->nprow = PetscMPIIntCast(size/lu->npcol); 540 if (size == lu->nprow * lu->npcol) break; 541 lu->npcol --; 542 } 543 544 ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr); 545 ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr); 546 ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr); 547 if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal to nprow %d * npcol %d",size,lu->nprow,lu->npcol); 548 549 lu->MatInputMode = DISTRIBUTED; 550 ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr); 551 if(lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL; 552 553 ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr); 554 if (!flg) { 555 options.Equil = NO; 556 } 557 558 ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr); 559 if (flg) { 560 switch (indx) { 561 case 0: 562 options.RowPerm = LargeDiag; 563 break; 564 case 1: 565 options.RowPerm = NOROWPERM; 566 break; 567 } 568 } 569 570 ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr); 571 if (flg) { 572 switch (indx) { 573 case 0: 574 options.ColPerm = NATURAL; 575 break; 576 case 1: 577 options.ColPerm = MMD_AT_PLUS_A; 578 break; 579 case 2: 580 options.ColPerm = MMD_ATA; 581 break; 582 case 3: 583 options.ColPerm = METIS_AT_PLUS_A; 584 break; 585 case 4: 586 options.ColPerm = PARMETIS; /* only works for np>1 */ 587 break; 588 default: 589 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 590 } 591 } 592 593 ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr); 594 if (!flg) { 595 options.ReplaceTinyPivot = NO; 596 } 597 598 options.ParSymbFact = NO; 599 ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr); 600 if (flg){ 601 #ifdef PETSC_HAVE_PARMETIS 602 options.ParSymbFact = YES; 603 options.ColPerm = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */ 604 #else 605 printf("parsymbfact needs PARMETIS"); 606 #endif 607 } 608 609 lu->FactPattern = SamePattern_SameRowPerm; 610 ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr); 611 if (flg) { 612 switch (indx) { 613 case 0: 614 lu->FactPattern = SamePattern; 615 break; 616 case 1: 617 lu->FactPattern = SamePattern_SameRowPerm; 618 break; 619 } 620 } 621 622 options.IterRefine = NOREFINE; 623 ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr); 624 if (flg) { 625 options.IterRefine = SLU_DOUBLE; 626 } 627 628 if (PetscLogPrintInfo) { 629 options.PrintStat = YES; 630 } else { 631 options.PrintStat = NO; 632 } 633 ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None", 634 (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr); 635 PetscOptionsEnd(); 636 637 lu->options = options; 638 lu->options.Fact = DOFACT; 639 lu->matsolve_iscalled = PETSC_FALSE; 640 lu->matmatsolve_iscalled = PETSC_FALSE; 641 *F = B; 642 PetscFunctionReturn(0); 643 } 644 645 EXTERN_C_BEGIN 646 #undef __FUNCT__ 647 #define __FUNCT__ "MatGetFactor_seqaij_superlu_dist" 648 PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F) 649 { 650 PetscErrorCode ierr; 651 652 PetscFunctionBegin; 653 ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr); 654 PetscFunctionReturn(0); 655 } 656 EXTERN_C_END 657 658 EXTERN_C_BEGIN 659 #undef __FUNCT__ 660 #define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist" 661 PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F) 662 { 663 PetscErrorCode ierr; 664 665 PetscFunctionBegin; 666 ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr); 667 PetscFunctionReturn(0); 668 } 669 EXTERN_C_END 670 671 #undef __FUNCT__ 672 #define __FUNCT__ "MatFactorInfo_SuperLU_DIST" 673 PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer) 674 { 675 Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)A->spptr; 676 superlu_options_t options; 677 PetscErrorCode ierr; 678 679 PetscFunctionBegin; 680 /* check if matrix is superlu_dist type */ 681 if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0); 682 683 options = lu->options; 684 ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr); 685 ierr = PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr); 686 ierr = PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr); 687 ierr = PetscViewerASCIIPrintf(viewer," Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr); 688 ierr = PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr); 689 ierr = PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr); 690 ierr = PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr); 691 ierr = PetscViewerASCIIPrintf(viewer," Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL": "LargeDiag");CHKERRQ(ierr); 692 693 switch(options.ColPerm){ 694 case NATURAL: 695 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 696 break; 697 case MMD_AT_PLUS_A: 698 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 699 break; 700 case MMD_ATA: 701 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 702 break; 703 case METIS_AT_PLUS_A: 704 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 705 break; 706 case PARMETIS: 707 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 708 break; 709 default: 710 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 711 } 712 713 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 714 715 if (lu->FactPattern == SamePattern){ 716 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 717 } else { 718 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 719 } 720 PetscFunctionReturn(0); 721 } 722 723 #undef __FUNCT__ 724 #define __FUNCT__ "MatView_SuperLU_DIST" 725 PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 726 { 727 PetscErrorCode ierr; 728 PetscBool iascii; 729 PetscViewerFormat format; 730 731 PetscFunctionBegin; 732 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 733 if (iascii) { 734 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 735 if (format == PETSC_VIEWER_ASCII_INFO) { 736 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 737 } 738 } 739 PetscFunctionReturn(0); 740 } 741 742 743 /*MC 744 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 745 746 Works with AIJ matrices 747 748 Options Database Keys: 749 + -mat_superlu_dist_r <n> - number of rows in processor partition 750 . -mat_superlu_dist_c <n> - number of columns in processor partition 751 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 752 . -mat_superlu_dist_equil - equilibrate the matrix 753 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 754 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 755 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 756 . -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm 757 . -mat_superlu_dist_iterrefine - use iterative refinement 758 - -mat_superlu_dist_statprint - print factorization information 759 760 Level: beginner 761 762 .seealso: PCLU 763 764 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 765 766 M*/ 767 768