
/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on Cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL - The sparse matrix and right hand side are all stored initially on process 0 (this mode would more accurately be called centralized)
    DISTRIBUTED - The sparse matrix and right hand side are initially stored across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
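
/* The input mode is selected at runtime with -mat_superlu_dist_matinput
   (see MatGetFactor_aij_superlu_dist() below); DISTRIBUTED is the default,
   and on a single process it is silently replaced by GLOBAL. */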

typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex *val;
#else
  double *val;
#endif
  PetscBool matsolve_iscalled,matmatsolve_iscalled;
  PetscBool CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
      dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PStatInit(&stat);        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PStatFree(&stat);

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,PETSC_NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,PETSC_NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was almost certainly set up
       for a single right hand side, not for MatMatSolve(), so destroy it and
       create a new SOLVEstruct. Otherwise it may result in memory corruption
       or an incorrect solution. See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
    dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,PETSC_NULL,&nrhs);CHKERRQ(ierr);

  PStatInit(&stat);        /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PStatFree(&stat);
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = PETSC_NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int            sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt    size;
  SuperLUStat_t  stat;
  double         *berr=0;
  IS             isrow;
  PetscLogDouble time0,time,time_min,time_max;
  Mat            F_diag=PETSC_NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex *av, *bv;
#else
  double *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect time for mat conversion */
    ierr = MPI_Barrier(((PetscObject)A)->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC).
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        Destroy_LU(N, &lu->grid, &lu->LUstruct);
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row);
#else
    dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row);
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE);
#else
    dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE);
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;
    bb=(Mat_SeqAIJ*)(mat->B)->data;
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#else
      dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to a crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        Destroy_LU(N, &lu->grid, &lu->LUstruct); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
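    /* Merge the diagonal block (mat->A, local column indices) and the
       off-diagonal block (mat->B, compact indices translated through garray)
       into one local compressed-row structure with global, ascending column
       indices in each row: first the B entries whose global column precedes
       the first A entry, then the A entries, then the remaining B entries. */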
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE);
#else
    dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE);
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscGetTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PStatInit(&stat);   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx_ABglobal fails, info: %d\n",sinfo);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx_ABglobal fails, info: %d\n",sinfo);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo);
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,((PetscObject)A)->comm);CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,((PetscObject)A)->comm);CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,((PetscObject)A)->comm);CHKERRQ(ierr);
    time = time/size; /* average time */
    ierr = PetscPrintf(((PetscObject)A)->comm, "        Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n                              %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  PetscFunctionReturn(0);
}

/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);

  /* Initialize ScalePermstruct and LUstruct. */
  ScalePermstructInit(M, N, &lu->ScalePermstruct);
  LUstructInit(M, N, &lu->LUstruct);
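
  /* Note that no symbolic analysis is performed here: SuperLU_DIST couples
     analysis and factorization inside p[dz]gssvx(), so the real work is
     deferred to MatLUFactorNumeric_SuperLU_DIST(). */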
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_aij_superlu_dist",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);

  B->factortype            = MAT_FACTOR_LU;

  ierr     = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; -- holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  ierr = PetscMPIIntCast((PetscInt)(0.5 + PetscSqrtReal((PetscReal)size)),&lu->npcol);CHKERRQ(ierr);
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    ierr = PetscMPIIntCast(size/lu->npcol,&lu->nprow);CHKERRQ(ierr);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
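  /* The loop above selects the largest npcol <= sqrt(size) (rounded) that
     divides size evenly, i.e. the most nearly square grid with
     nprow*npcol == size; e.g. size=6 gives nprow=3, npcol=2. */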

  ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;
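  /* With one process the entire matrix is local anyway, so the simpler
     GLOBAL (ABglobal) code path can be used directly. */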

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) options.ReplaceTinyPivot = NO;

  options.ParSymbFact = NO;

  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    ierr = PetscPrintf(((PetscObject)A)->comm,"parsymbfact needs PARMETIS\n");CHKERRQ(ierr);
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;
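  /* SamePattern_SameRowPerm reuses both the sparsity pattern and the row
     permutation from the previous factorization (cheapest, but assumes the
     new numerical values are similar); SamePattern keeps only the pattern
     and lets SuperLU_DIST recompute the row permutation. See
     MatLUFactorNumeric_SuperLU_DIST() for how each choice is applied. */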

  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) options.IterRefine = SLU_DOUBLE;

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",
                          (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr    = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_SuperLU_DIST"
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}


/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

   Works with AIJ matrices

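   A typical use from code, as a sketch (assumes a parallel AIJ matrix A and
   assembled vectors b and x; the orderings are ignored by this package):
.vb
      Mat           F;
      IS            rperm,cperm;
      MatFactorInfo info;
      ierr = MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
      ierr = MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);CHKERRQ(ierr);
      ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
      ierr = MatSolve(F,b,x);CHKERRQ(ierr);
      ierr = ISDestroy(&rperm);CHKERRQ(ierr);
      ierr = ISDestroy(&cperm);CHKERRQ(ierr);
      ierr = MatDestroy(&F);CHKERRQ(ierr);
.ve
   More commonly the package is selected at runtime through PCLU, e.g.
   -pc_type lu -pc_factor_mat_solver_package superlu_dist
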
  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode; GLOBAL is centralized on process 0, DISTRIBUTED is spread across the communicator
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_parsymbfact - use parallel symbolic factorization (requires ParMETIS)
. -mat_superlu_dist_fact <SamePattern,SamePattern_SameRowPerm> - sparsity pattern to reuse for repeated factorization (default SamePattern_SameRowPerm)
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverPackage(), MatSolverPackage

M*/