#define PETSCMAT_DLL

/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include "../src/mat/impls/aij/seq/aij.h"
#include "../src/mat/impls/aij/mpi/mpiaij.h"
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on cray */
#include "stdlib.h"
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include "superlu_zdefs.h"
#else
#include "superlu_ddefs.h"
#endif
EXTERN_C_END

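/*
   How PETSc hands the matrix to SuperLU_DIST: GLOBAL replicates the entire
   matrix on every process, DISTRIBUTED passes each process its local rows only.
*/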
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};

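/*
   Private context for a SuperLU_DIST LU factorization; hung off the factored
   matrix as Mat->spptr and torn down in MatDestroy_SuperLU_DIST().
*/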
typedef struct {
  int_t                   nprow,npcol,*row,*col;
  gridinfo_t              grid;
  superlu_options_t       options;
  SuperMatrix             A_sup;
  ScalePermstruct_t       ScalePermstruct;
  LUstruct_t              LUstruct;
  int                     StatPrint;
  SuperLU_MatInputMode    MatInputMode;
  SOLVEstruct_t           SOLVEstruct;
  fact_t                  FactPattern;
  MPI_Comm                comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex           *val;
#else
  double                  *val;
#endif
  PetscTruth              matsolve_iscalled,matmatsolve_iscalled;
  PetscTruth              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo *);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo *);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscTruth       flg;

  PetscFunctionBegin;
  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }

  ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
      dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PStatInit(&stat);        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#else
    pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) {
     PStatPrint(&lu->options, &stat, &lu->grid);     /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(scat);CHKERRQ(ierr);
    ierr = VecDestroy(x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL is not supported for nproc %d > 1",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was most likely created for a single
       right-hand side in MatSolve(), not for MatMatSolve(); destroy it and let a new
       SOLVEstruct be created, otherwise the solve may corrupt memory or produce an
       incorrect solution. See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
    dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,PETSC_NULL,&nrhs);CHKERRQ(ierr);

  PStatInit(&stat);        /* Initialize the statistics variables. */
  ierr = MatGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,
                     &lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs,
                     &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);
  ierr = MatRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) {
     PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = PETSC_NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, irow,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size,rank;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  PetscLogDouble   time0,time,time_min,time_max;
  Mat              F_diag=PETSC_NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect time for mat conversion */
    ierr = MPI_Barrier(((PetscObject)A)->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscTruth flg;
      ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
       Note: the storage for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        Destroy_CompCol_Matrix_dist(&lu->A_sup);
        /* Destroy_LU(N, &lu->grid, &lu->LUstruct); crashes here! Skipping it does not appear to leak memory. */
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        Destroy_CompCol_Matrix_dist(&lu->A_sup);
        Destroy_LU(N, &lu->grid, &lu->LUstruct);
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row);
#else
    dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row);
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE);
#else
    dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE);
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#else
      dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        /* Destroy_LU(N, &lu->grid, &lu->LUstruct); crashes here! Skipping it does not appear to leak memory. */
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        Destroy_LU(N, &lu->grid, &lu->LUstruct); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
    nz = 0; irow = rstart;
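    /* Merge the diagonal block A (local columns) and the off-diagonal block B
       (columns mapped to global numbering through garray) of the MPIAIJ matrix
       into a single local compressed-row structure whose column indices are
       global and ascending within each row. */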
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA = ai[i+1] - ai[i];
      countB = bi[i+1] - bi[i];
      ajj = aj + ai[i];  /* ptr to the beginning of this row */
      bjj = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB = 0;
      for (j=0; j<countB; j++){
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz] = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++){
        lu->col[nz] = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++){
        lu->col[nz] = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
                                   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE);
#else
    dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
                                   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE);
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscGetTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PStatInit(&stat);   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                     &lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                     &lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,
            &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,
            &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1){
    ierr = MatDestroy(A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    if (size > 1){
      ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,((PetscObject)A)->comm);CHKERRQ(ierr);
      time = time/size; /* average time */
      if (!rank) {
        ierr = PetscPrintf(PETSC_COMM_SELF, "        Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n                              %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscPrintf(PETSC_COMM_SELF, "        Mat conversion(PETSc->SuperLU_DIST) time: \n    %g\n",time0);CHKERRQ(ierr);
    }
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1){
    F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied; local option used only by this function */
  PetscFunctionReturn(0);
}

/* Note: the PETSc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST  *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscInt          M=A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);

  /* Initialize ScalePermstruct and LUstruct. */
  ScalePermstructInit(M, N, &lu->ScalePermstruct);
  LUstructInit(M, N, &lu->LUstruct);
  (F)->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  (F)->ops->solve           = MatSolve_SuperLU_DIST;
  (F)->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MAT_SOLVER_SUPERLU_DIST;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscTruth        flg;
  const char        *pctype[] = {"MMD_AT_PLUS_A","NATURAL","MMD_ATA","PARMETIS"};
  const char        *prtype[] = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_aij_superlu_dist",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);
  B->factortype            = MAT_FACTOR_LU;

  ierr = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = MMD_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO;  -- holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  /* Default number of process columns and rows */
  lu->npcol = PetscMPIIntCast((PetscInt)(0.5 + sqrt((PetscReal)size)));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = PetscMPIIntCast(size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
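  /* e.g. size=6 yields nprow=3, npcol=2; a prime size falls through to nprow=size, npcol=1 */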

  ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
    if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

    lu->MatInputMode = DISTRIBUTED;
    ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
    if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

    ierr = PetscOptionsTruth("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
    if (!flg) {
      options.Equil = NO;
    }

    ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",prtype,2,prtype[0],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      switch (indx) {
      case 0:
        options.RowPerm = LargeDiag;
        break;
      case 1:
        options.RowPerm = NOROWPERM;
        break;
      }
    }

    ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",pctype,4,pctype[0],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      switch (indx) {
      case 0:
        options.ColPerm = MMD_AT_PLUS_A;
        break;
      case 1:
        options.ColPerm = NATURAL;
        break;
      case 2:
        options.ColPerm = MMD_ATA;
        break;
      case 3:
        options.ColPerm = PARMETIS;
        break;
      }
    }

    ierr = PetscOptionsTruth("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
    if (!flg) {
      options.ReplaceTinyPivot = NO;
    }

    options.ParSymbFact = NO;
    ierr = PetscOptionsTruth("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
    if (flg) {
#if defined(PETSC_HAVE_PARMETIS)
      options.ParSymbFact = YES;
      options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
      ierr = PetscPrintf(PETSC_COMM_SELF,"Warning: parallel symbolic factorization requires ParMETIS; the option is ignored\n");CHKERRQ(ierr);
#endif
    }

    lu->FactPattern = SamePattern;
    ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[0],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      switch (indx) {
      case 0:
        lu->FactPattern = SamePattern;
        break;
      case 1:
        lu->FactPattern = SamePattern_SameRowPerm;
        break;
      }
    }

    options.IterRefine = NOREFINE;
    ierr = PetscOptionsTruth("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
    if (flg) {
      options.IterRefine = DOUBLE;
    }

    if (PetscLogPrintInfo) {
      options.PrintStat = YES;
    } else {
      options.PrintStat = NO;
    }
    ierr = PetscOptionsTruth("-mat_superlu_dist_statprint","Print factorization information","None",
                             (PetscTruth)options.PrintStat,(PetscTruth*)&options.PrintStat,0);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->CleanUpSuperLU_Dist  = PETSC_TRUE;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;
  *F = B;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscTruths[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscTruths[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscTruths[options.IterRefine == DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);
  if (options.ColPerm == NATURAL) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
  } else if (options.ColPerm == MMD_AT_PLUS_A) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
  } else if (options.ColPerm == MMD_ATA) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
  } else if (options.ColPerm == PARMETIS) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");

  ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscTruths[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_SuperLU_DIST"
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscTruth        iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}


/*MC
  MAT_SOLVER_SUPERLU_DIST - Parallel direct solver package for LU factorization

   Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <MMD_AT_PLUS_A,NATURAL,MMD_ATA,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern,SamePattern_SameRowPerm> - sparsity pattern to reuse for repeated factorization
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

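   Example usage with KSP, a minimal sketch ("myapp" is a placeholder for any PETSc
   program that solves a linear system with an AIJ matrix; option names follow this
   PETSc release and may differ in others):
.vb
     mpiexec -n 4 ./myapp -ksp_type preonly -pc_type lu \
         -pc_factor_mat_solver_package superlu_dist -mat_superlu_dist_statprint
.ve
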
   Level: beginner

.seealso: PCLU

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/

758