xref: /petsc/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c (revision e7f46db8d62cb2e4e59111bf21061e64ea60daab)

/*
        Provides an interface to the SuperLU_DIST sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* Works around a build problem with SuperLU on Cray systems */
#include <stdlib.h>
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL - The sparse matrix and right-hand side are initially stored entirely on process 0, i.e. centralized.
    DISTRIBUTED - The sparse matrix and right-hand side are initially stored across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
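/* The mode is selected at runtime with -mat_superlu_dist_matinput (see MatGetFactor_aij_superlu_dist()
   below); on a single process a DISTRIBUTED request is mapped back to GLOBAL. */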

typedef struct {
  int_t                  nprow,npcol,*row,*col;
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SuperLU_MatInputMode   MatInputMode;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
#else
  PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  ierr = PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
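
/* A minimal calling sketch for MatSuperluDistGetDiagU() (hypothetical user code; it assumes F is the
   factored matrix obtained from MatGetFactor()/MatLUFactorSymbolic()/MatLUFactorNumeric() with
   MATSOLVERSUPERLU_DIST, or from PCFactorGetMatrix()):

     PetscScalar *diagU;
     ierr = PetscMalloc1(F->rmap->N,&diagU);CHKERRQ(ierr);
     ierr = MatSuperluDistGetDiagU(F,diagU);CHKERRQ(ierr);
     ... use the diagonal entries of the factor U ...
     ierr = PetscFree(diagU);CHKERRQ(ierr);
*/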

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

  PetscFunctionBegin;
  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);CHKERRQ(ierr);

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in the existing SOLVEstruct was not built for MatMatSolve(),
       so destroy it here and let a fresh SOLVEstruct be created below.
       Reusing it could cause memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatCreateSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else if (lu->FactPattern == SamePattern) {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate L and U matrices. */
        lu->options.Fact = SamePattern;
      } else if (lu->FactPattern == DOFACT) {
        PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = DOFACT;

#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
        PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
      } else {
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"FactPattern must be one of SamePattern, SamePattern_SameRowPerm or DOFACT");
      }
    }
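
    /* Copy the matrix into lu->val/lu->col/lu->row as local CSR with ascending global column
       indices, merging per row: first the off-diagonal (B) entries whose global column
       garray[bjj[j]] precedes the diagonal block, then the diagonal-block (A) entries shifted
       by rstart, then the remaining B entries. For example, with rstart = 10, A-part global
       columns {10,12} and B-part global columns {3,15}, the merged row is {3,10,12,15}. */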
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      if (aj) {
        ajj = aj + ai[i]; /* ptr to the beginning of this row */
      } else {
        ajj = NULL;
      }
      bjj = bj + bi[i];

      /* B part, smaller col index */
      if (aj) {
        colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      } else { /* SuperLU_DIST does not require the matrix to have diagonal entries, so aj = NULL works */
        colA_start = rstart;
      }
      jB = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;

    if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
      PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
    }
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif
  }

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %d",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        ierr = PetscInfo1(F,"U(i,i) is exactly zero, i= %d\n",sinfo);CHKERRQ(ierr);
      } else if (sinfo > lu->A_sup.ncol) {
        /* sinfo is the number of bytes allocated when the memory allocation
           failure occurred, plus A->ncol. */
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
        ierr = PetscInfo1(F,"Number of bytes allocated when memory allocation fails %d\n",sinfo);CHKERRQ(ierr);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"info = %d, the argument in p*gssvx() had an illegal value",sinfo);
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  F->assembled     = PETSC_TRUE;
  F->preallocated  = PETSC_TRUE;
  lu->options.Fact = FACTORED; /* The factored form of A is supplied; local option used only within this function */
  PetscFunctionReturn(0);
}

/* Note: the PETSc row (r) and column (c) permutations are ignored. */
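/* SuperLU_DIST computes its own row and column permutations, controlled by options.RowPerm and
   options.ColPerm, inside p*gssvx() when the numeric factorization runs with options.Fact == DOFACT. */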
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu = (Mat_SuperLU_DIST*)A->data;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr    = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr    = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);

  switch (options.ColPerm) {
  case NATURAL:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
    break;
  case MMD_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case MMD_ATA:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
    break;
  case METIS_AT_PLUS_A:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
    break;
  case PARMETIS:
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);

  if (lu->FactPattern == SamePattern) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
  } else if (lu->FactPattern == DOFACT) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization DOFACT\n");CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = MatView_Info_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"LargeDiag","NATURAL"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
  PetscBool              set;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
  ierr = PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);
  B->ops->getinfo          = MatGetInfo_External;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);CHKERRQ(ierr);

  ierr    = PetscNewLog(B,&lu);CHKERRQ(ierr);
  B->data = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO;  -- holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Default num of process columns and rows */
  lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = (int_t) (size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
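  /* Example: size = 8 gives npcol = 3, then 2, with nprow = 4, i.e. a 4 x 2 process grid; a perfect
     square such as size = 16 yields a 4 x 4 grid. The loop terminates because npcol = 1 divides size. */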

  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && !flg) options.Equil = NO;

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);CHKERRQ(ierr);
  if (set && flg && size>1) {
    if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_INFO)
      ierr = PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");CHKERRQ(ierr);
#endif
    } else {
#if defined(PETSC_HAVE_PARMETIS)
      options.ParSymbFact = YES;
      options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
    }
  }

  lu->FactPattern = SamePattern;
  ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE,&flg,&set);CHKERRQ(ierr);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_superlu_dist);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  ierr = MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_type superlu_dist to use this direct solver

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_parsymbfact - use parallel symbolic factorization (requires ParMETIS)
. -mat_superlu_dist_fact <SamePattern,SamePattern_SameRowPerm,DOFACT> - sparsity pattern to reuse for repeated factorization
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information
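
   Example usage (a sketch; ./ex2 here stands for any PETSc executable that solves a linear system with KSP, it is not part of this file):
.vb
   mpiexec -n 4 ./ex2 -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -mat_superlu_dist_colperm PARMETIS
.ve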

   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverType(), MatSolverType

M*/