xref: /petsc/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c (revision 19bfd8f8fa516b4fd4ffe950a75fc2bc24267a71)

/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petsctime.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL      - The sparse matrix and right-hand side are initially stored entirely on process 0; this mode could better be called "centralized".
    DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
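
/* The input mode determines which SuperLU_DIST driver is called below: GLOBAL
   gathers the matrix onto every process and calls p?gssvx_ABglobal(), while
   DISTRIBUTED hands each process its local CSR block to p?gssvx().  The mode
   is selected at runtime with -mat_superlu_dist_matinput. */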

typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex        *val;
#else
  double               *val;
#endif
  PetscBool            matsolve_iscalled,matmatsolve_iscalled;
  PetscBool            CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was most likely set up for a different
       number of right-hand sides (e.g. by a previous MatSolve()), so destroy it and
       let a new SOLVEstruct be created; reusing it may cause memory corruption or an
       incorrect solution.  See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  PetscLogDouble   time0,time,time_min,time_max;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect time for mat conversion */
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    ierr = PetscTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
       Note: the storage for lu->val, lu->col and lu->row is allocated inside CompRow_to_CompCol_dist()! */
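    /* Reuse modes for repeated factorizations (cf. the SuperLU_DIST users' guide):
       SamePattern reuses only the column permutation and rebuilds L and U, while
       SamePattern_SameRowPerm also reuses the row permutation and the existing L and U
       structures, which is cheaper when the new matrix has similar numerical values. */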
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;
    bb=(Mat_SeqAIJ*)(mat->B)->data;
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to a crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
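
    /* Merge the diagonal block (A, local column indices) and the off-diagonal block
       (B, compact column indices translated through garray) of the MPIAIJ matrix into
       a single local CSR structure with ascending global column indices: for each row,
       first the B entries whose global column precedes the first A column, then the
       A entries, then the remaining B entries. */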
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx_ABglobal fails, info: %d\n",sinfo);
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    time = time/size; /* average time */
    ierr = PetscPrintf(PetscObjectComm((PetscObject)A), "        Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n                              %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  PetscFunctionReturn(0);
}

/* Note the Petsc r and c permutations are ignored */
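/* Note also that this "symbolic" routine only creates the process grid and the
   SuperLU_DIST data structures; the fill-reducing ordering and the symbolic analysis
   are actually performed inside p?gssvx during the first call to
   MatLUFactorNumeric_SuperLU_DIST(), when options.Fact is still DOFACT. */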
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(M, N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);

  B->factortype = MAT_FACTOR_LU;

  ierr     = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; - holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  ierr = PetscMPIIntCast((PetscInt)(0.5 + PetscSqrtReal((PetscReal)size)),&lu->npcol);CHKERRQ(ierr);
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    ierr = PetscMPIIntCast(size/lu->npcol,&lu->nprow);CHKERRQ(ierr);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
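  /* The loop above picks the process grid closest to square that exactly tiles the
     communicator: e.g. for size=6, npcol starts at round(sqrt(6))=2 and 6 == 3*2, so a
     3 x 2 grid is used; for a prime size p it falls through to a p x 1 grid. */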

  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,&lu->nprow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,&lu->npcol,NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) options.ReplaceTinyPivot = NO;

  options.ParSymbFact = NO;

  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    ierr = PetscPrintf(PETSC_COMM_SELF,"parsymbfact needs PARMETIS\n");CHKERRQ(ierr);
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;

  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr               = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) options.IterRefine = SLU_DOUBLE;

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",
                          (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr    = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_SuperLU_DIST"
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}


/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

   Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern_SameRowPerm> - (choose one of) SamePattern SamePattern_SameRowPerm
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information
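
   A typical command line selecting this package (sketched with a hypothetical example
   executable ex1; any code that assembles an AIJ matrix and solves with KSP works):
.vb
     mpiexec -n 4 ./ex1 -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package superlu_dist -mat_superlu_dist_colperm PARMETIS
.ve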

   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
