
/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This works around a header problem with SuperLU on Cray */
#include <stdlib.h>
#endif
#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_DIST macro telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END
/*
    GLOBAL - The sparse matrix and right-hand side are initially stored entirely on process 0; a more accurate name would be CENTRALIZED.
    DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
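
/*
   For example (a sketch; "./app" is a placeholder executable), the input mode is selected
   at runtime through the option registered in MatGetFactor_aij_superlu_dist() below:

       mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_package superlu_dist -mat_superlu_dist_matinput GLOBAL

   DISTRIBUTED is the default with more than one process; on a single process the mode is
   switched to GLOBAL.
*/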

typedef struct {
  int_t                  nprow,npcol,*row,*col;
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SuperLU_MatInputMode   MatInputMode;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_SuperLU_DIST"
PetscErrorCode MatGetDiagonal_SuperLU_DIST(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: SuperLU_DIST factor");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_SuperLU_DIST"
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolve_SuperLU_DIST"
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_SuperLU_DIST"
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was likely not created for MatMatSolve(),
       so destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
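
/*
   Example (a minimal sketch; F is a factor obtained from MatGetFactor() and nrhs is chosen
   by the caller): solving for several right-hand sides at once with dense B and X.

     Mat      B,X;
     PetscInt m;
     ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
     ierr = MatCreateDense(PETSC_COMM_WORLD,m,PETSC_DECIDE,PETSC_DECIDE,nrhs,NULL,&B);CHKERRQ(ierr);
     ierr = MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);CHKERRQ(ierr);
     ... fill B with MatSetValues() and assemble it ...
     ierr = MatMatSolve(F,B,X);CHKERRQ(ierr);
*/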


#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc NR (row-oriented) matrix to SuperLU_DIST NC (column-oriented).
       Note: the memory for lu->val, lu->col, and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;
    bb=(Mat_SeqAIJ*)(mat->B)->data;
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else if (lu->FactPattern == SamePattern) {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      } else {
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"FactPattern must be SamePattern or SamePattern_SameRowPerm");
      }
    }
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      if (aj) {
        ajj = aj + ai[i]; /* ptr to the beginning of this row */
      } else {
        ajj = NULL;
      }
      bjj = bj + bi[i];

      /* B part, smaller col index */
      if (aj) {
        colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      } else { /* SuperLU_DIST does not require the matrix to have diagonal entries, so aj==NULL works */
        colA_start = rstart;
      }
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;

    if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
      PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
    }
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif
  }

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->errortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        ierr = PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);CHKERRQ(ierr);
      } else if (sinfo > lu->A_sup.ncol) {
        /*
         number of bytes allocated when memory allocation
         failure occurred, plus A->ncol.
         */
        F->errortype = MAT_FACTOR_OUTMEMORY;
        ierr = PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);CHKERRQ(ierr);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied; local option used only by this function */
  PetscFunctionReturn(0);
}

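/*
   Example (a sketch through the public interface): for a sequence of matrices with an
   unchanged nonzero structure, MatLUFactorNumeric() may be called repeatedly on the same
   factor F; the DOFACT/SamePattern logic above then reuses the symbolic data, and the
   branch taken is selected with -mat_superlu_dist_fact.

     for (k=0; k<nsteps; k++) {
       ... update the values of A, then MatAssemblyBegin()/MatAssemblyEnd() ...
       ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
       ierr = MatSolve(F,b,x);CHKERRQ(ierr);
     }
*/
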
/* Note: the PETSc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

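/*
   Example (a minimal sketch of driving the factor interface directly; A, b, and x are
   assumed to be an assembled AIJ matrix and conforming vectors created by the caller):

     Mat           F;
     MatFactorInfo info;
     IS            rperm,cperm;

     ierr = MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
     ierr = MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);CHKERRQ(ierr); /* ignored by this package */
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&info);CHKERRQ(ierr);
     ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
     ierr = ISDestroy(&rperm);CHKERRQ(ierr);
     ierr = ISDestroy(&cperm);CHKERRQ(ierr);
     ierr = MatDestroy(&F);CHKERRQ(ierr);
*/
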
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
static PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_superlu_dist"
static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"LargeDiag","NATURAL"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};
  PetscBool              set;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;
  B->ops->getdiagonal      = MatGetDiagonal_SuperLU_DIST;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);CHKERRQ(ierr);

  ierr     = PetscNewLog(B,&lu);CHKERRQ(ierr);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; - holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = (int_t) (size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
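  /* For example, size=6 gives npcol = (int_t)(0.5 + sqrt(6)) = 2 and nprow = 3, a 3 x 2 grid;
     for a prime size such as 7 the loop decrements npcol to 1, giving a 7 x 1 grid. */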

  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && !flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg && size>1) {
    if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_INFO)
      ierr = PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");CHKERRQ(ierr);
#endif
    } else {
#if defined(PETSC_HAVE_PARMETIS)
      options.ParSymbFact = YES;
      options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
    }
  }

  lu->FactPattern = SamePattern_SameRowPerm;
  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr               = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);CHKERRQ(ierr);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);CHKERRQ(ierr);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSolverPackageRegister_SuperLU_DIST"
PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,  MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  ierr = MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,  MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr    = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_SuperLU_DIST"
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}


/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_package superlu_dist to use this direct solver

   Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

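   Example usage (a sketch; ./app stands for any application that solves a linear system with KSP):
       mpiexec -n 4 ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package superlu_dist
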
   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
