/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};

typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex        *val;
#else
  double               *val;
#endif
  PetscBool            matsolve_iscalled,matmatsolve_iscalled;
  PetscBool            CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo *);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo *);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
      dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,PETSC_NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,PETSC_NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* communication pattern of SOLVEstruct is unlikely created for matmatsolve,
       thus destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
    dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,PETSC_NULL,&nrhs);CHKERRQ(ierr);

  PStatInit(&stat); /* Initialize the statistics variables. */
  ierr = MatGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,
                     &lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs,
                     &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = PETSC_NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size,rank;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  PetscLogDouble   time0,time,time_min,time_max;
  Mat              F_diag=PETSC_NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect time for mat conversion */
    ierr = MPI_Barrier(((PetscObject)A)->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert Petsc NR matrix to SuperLU_DIST NC.
       Note: memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        Destroy_LU(N, &lu->grid, &lu->LUstruct);
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row);
#else
    dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row);
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE);
#else
    dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE);
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#else
      dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        Destroy_LU(N, &lu->grid, &lu->LUstruct); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
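
    /* Merge each local row's diagonal-block (A) and off-diagonal-block (B) entries into a
       single row of the local compressed-row structure, keeping global column indices in
       ascending order.  Illustrative example (hypothetical values): with rstart=4,
       diagonal-block columns {0,2} (global {4,6}) and off-diagonal entries whose
       garray-mapped global columns are {1,9}, the row is emitted as 1,4,6,9 -- the B entries
       below colA_start first, then the A entries, then the remaining B entries. */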
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
                                   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE);
#else
    dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
                                   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE);
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscGetTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                     &lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                     &lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,
            &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,
            &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    if (size > 1) {
      ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      time = time/size; /* average time */
      if (!rank) {
        ierr = PetscPrintf(PETSC_COMM_SELF, " Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscPrintf(PETSC_COMM_SELF, " Mat conversion(PETSc->SuperLU_DIST) time: \n %g\n",time0);CHKERRQ(ierr);
    }
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  PetscFunctionReturn(0);
}

/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M=A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);

  /* Initialize ScalePermstruct and LUstruct. */
  ScalePermstructInit(M, N, &lu->ScalePermstruct);
  LUstructInit(M, N, &lu->LUstruct);

  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_aij_superlu_dist",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);
  B->factortype = MAT_FACTOR_LU;

  ierr     = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
       options.Fact              = DOFACT;
       options.Equil             = YES;
       options.ParSymbFact       = NO;
       options.ColPerm           = METIS_AT_PLUS_A;
       options.RowPerm           = LargeDiag;
       options.ReplaceTinyPivot  = YES;
       options.IterRefine        = DOUBLE;
       options.Trans             = NOTRANS;
       options.SolveInitialized  = NO;  -- holds the communication pattern used by MatSolve() and MatMatSolve()
       options.RefineInitialized = NO;
       options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  lu->npcol = PetscMPIIntCast((PetscInt)(0.5 + PetscSqrtReal((PetscReal)size)));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = PetscMPIIntCast(size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
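  /* For illustration: the search above picks the most nearly square grid whose dimensions
     multiply to the communicator size, e.g. 4 processes -> nprow=2, npcol=2 and
     6 processes -> nprow=3, npcol=2; a prime size such as 7 falls back to nprow=7, npcol=1.
     The -mat_superlu_dist_r and -mat_superlu_dist_c options below override these defaults. */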

  ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;
  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) {
    options.Equil = NO;
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) {
    options.ReplaceTinyPivot = NO;
  }

  options.ParSymbFact = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
#ifdef PETSC_HAVE_PARMETIS
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    printf("parsymbfact needs PARMETIS");
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;
  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
    options.IterRefine = DOUBLE;
  }

  if (PetscLogPrintInfo) {
    options.PrintStat = YES;
  } else {
    options.PrintStat = NO;
  }
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",
                          (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;
  *F = B;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu = (Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_SuperLU_DIST"
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

"NATURAL": "LargeDiag");CHKERRQ(ierr); 701 702 switch(options.ColPerm){ 703 case NATURAL: 704 ierr = PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");CHKERRQ(ierr); 705 break; 706 case MMD_AT_PLUS_A: 707 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr); 708 break; 709 case MMD_ATA: 710 ierr = PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");CHKERRQ(ierr); 711 break; 712 case METIS_AT_PLUS_A: 713 ierr = PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr); 714 break; 715 case PARMETIS: 716 ierr = PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");CHKERRQ(ierr); 717 break; 718 default: 719 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation"); 720 } 721 722 ierr = PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr); 723 724 if (lu->FactPattern == SamePattern){ 725 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");CHKERRQ(ierr); 726 } else { 727 ierr = PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr); 728 } 729 PetscFunctionReturn(0); 730 } 731 732 #undef __FUNCT__ 733 #define __FUNCT__ "MatView_SuperLU_DIST" 734 PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer) 735 { 736 PetscErrorCode ierr; 737 PetscBool iascii; 738 PetscViewerFormat format; 739 740 PetscFunctionBegin; 741 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 742 if (iascii) { 743 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 744 if (format == PETSC_VIEWER_ASCII_INFO) { 745 ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr); 746 } 747 } 748 PetscFunctionReturn(0); 749 } 750 751 752 /*MC 753 MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization 754 755 Works with AIJ matrices 756 757 Options Database Keys: 758 + -mat_superlu_dist_r <n> - number of rows in processor partition 759 . -mat_superlu_dist_c <n> - number of columns in processor partition 760 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed 761 . -mat_superlu_dist_equil - equilibrate the matrix 762 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation 763 . -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation 764 . -mat_superlu_dist_replacetinypivot - replace tiny pivots 765 . -mat_superlu_dist_fact <SamePattern> (choose one of) SamePattern SamePattern_SameRowPerm 766 . -mat_superlu_dist_iterrefine - use iterative refinement 767 - -mat_superlu_dist_statprint - print factorization information 768 769 Level: beginner 770 771 .seealso: PCLU 772 773 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage 774 775 M*/ 776 777