xref: /petsc/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c (revision 3bb1ff401821b9e2ae019d3e61bc8ab4bd4e59d5)
1 
2 /*
3         Provides an interface to the SuperLU_DIST_2.2 sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/seq/aij.h>
7 #include <../src/mat/impls/aij/mpi/mpiaij.h>
8 #include <petsctime.h>
9 #if defined(PETSC_HAVE_STDLIB_H) /* This works around a problem with SuperLU on Cray systems */
10 #include <stdlib.h>
11 #endif
12 
13 #if defined(PETSC_USE_64BIT_INDICES)
14 /* Defining _LONGINT tells SuperLU_DIST to use long long int for its indices */
15 #define _LONGINT
16 #endif
17 
18 EXTERN_C_BEGIN
19 #if defined(PETSC_USE_COMPLEX)
20 #include <superlu_zdefs.h>
21 #else
22 #include <superlu_ddefs.h>
23 #endif
24 EXTERN_C_END
25 
26 /*
27     GLOBAL - The sparse matrix and right-hand side are initially stored entirely on process 0; a better name would be centralized.
28     DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
29 */
30 typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
31 const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
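/*
   A minimal usage sketch (not part of this file): the input mode is normally selected
   at runtime through the options database, for example

       -mat_superlu_dist_matinput DISTRIBUTED

   which the PetscOptionsEnum() call in MatGetFactor_aij_superlu_dist() below maps
   onto this SuperLU_MatInputMode value.
*/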
32 
33 typedef struct {
34   int_t                nprow,npcol,*row,*col;
35   gridinfo_t           grid;
36   superlu_options_t    options;
37   SuperMatrix          A_sup;
38   ScalePermstruct_t    ScalePermstruct;
39   LUstruct_t           LUstruct;
40   int                  StatPrint;
41   SuperLU_MatInputMode MatInputMode;
42   SOLVEstruct_t        SOLVEstruct;
43   fact_t               FactPattern;
44   MPI_Comm             comm_superlu;
45 #if defined(PETSC_USE_COMPLEX)
46   doublecomplex        *val;
47 #else
48   double               *val;
49 #endif
50   PetscBool            matsolve_iscalled,matmatsolve_iscalled;
51   PetscBool            CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
52 } Mat_SuperLU_DIST;
53 
54 extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
55 extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
56 extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
57 extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
58 extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
59 extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
60 extern PetscErrorCode MatDestroy_MPIAIJ(Mat);
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "MatDestroy_SuperLU_DIST"
64 PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
65 {
66   PetscErrorCode   ierr;
67   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
68   PetscBool        flg;
69 
70   PetscFunctionBegin;
71   if (lu && lu->CleanUpSuperLU_Dist) {
72     /* Deallocate SuperLU_DIST storage */
73     if (lu->MatInputMode == GLOBAL) {
74       PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
75     } else {
76       PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
77       if (lu->options.SolveInitialized) {
78 #if defined(PETSC_USE_COMPLEX)
79         PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
80 #else
81         PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
82 #endif
83       }
84     }
85     PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
86     PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
87     PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));
88 
89     /* Release the SuperLU_DIST process grid. */
90     PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
91     ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
92   }
93   ierr = PetscFree(A->spptr);CHKERRQ(ierr);
94 
95   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
96   if (flg) {
97     ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
98   } else {
99     ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
100   }
101   PetscFunctionReturn(0);
102 }
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "MatSolve_SuperLU_DIST"
106 PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
107 {
108   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
109   PetscErrorCode   ierr;
110   PetscMPIInt      size;
111   PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
112   SuperLUStat_t    stat;
113   double           berr[1];
114   PetscScalar      *bptr;
115   PetscInt         nrhs=1;
116   Vec              x_seq;
117   IS               iden;
118   VecScatter       scat;
119   int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
120 
121   PetscFunctionBegin;
122   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
123   if (size > 1 && lu->MatInputMode == GLOBAL) {
124     /* global mat input, convert b to x_seq */
125     ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
126     ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
127     ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
128     ierr = ISDestroy(&iden);CHKERRQ(ierr);
129 
130     ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
131     ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
132     ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
133   } else { /* size==1 || distributed mat input */
134     if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
135       /* see comments in MatMatSolve() */
136 #if defined(PETSC_USE_COMPLEX)
137       PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
138 #else
139       PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
140 #endif
141       lu->options.SolveInitialized = NO;
142     }
143     ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
144     ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
145   }
146 
147   if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
148 
149   PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
150   if (lu->MatInputMode == GLOBAL) {
151 #if defined(PETSC_USE_COMPLEX)
152     PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
153 #else
154     PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
155 #endif
156   } else { /* distributed mat input */
157 #if defined(PETSC_USE_COMPLEX)
158     PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
159 #else
160     PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
161 #endif
162   }
163   if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx/pzgssvx failed, info: %d\n",info);
164 
165   if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
166   PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
167 
168   if (size > 1 && lu->MatInputMode == GLOBAL) {
169     /* convert seq x to mpi x */
170     ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
171     ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
172     ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
173     ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
174     ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
175   } else {
176     ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
177 
178     lu->matsolve_iscalled    = PETSC_TRUE;
179     lu->matmatsolve_iscalled = PETSC_FALSE;
180   }
181   PetscFunctionReturn(0);
182 }
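/*
   A minimal sketch (not part of this file; names follow the PETSc API of this era,
   ierr/CHKERRQ error checking omitted) of how this routine is reached:

       Mat           F;
       IS            row,col;
       MatFactorInfo info;
       MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);
       MatGetOrdering(A,MATORDERINGNATURAL,&row,&col);   // orderings are ignored by this package
       MatFactorInfoInitialize(&info);
       MatLUFactorSymbolic(F,A,row,col,&info);
       MatLUFactorNumeric(F,A,&info);
       MatSolve(F,b,x);                                  // dispatches to MatSolve_SuperLU_DIST()
*/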
183 
184 #undef __FUNCT__
185 #define __FUNCT__ "MatMatSolve_SuperLU_DIST"
186 PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
187 {
188   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
189   PetscErrorCode   ierr;
190   PetscMPIInt      size;
191   PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
192   SuperLUStat_t    stat;
193   double           berr[1];
194   PetscScalar      *bptr;
195   int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
196   PetscBool        flg;
197 
198   PetscFunctionBegin;
199   if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
200   ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
201   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
202   ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
203   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
204 
205   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
206   if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL is not supported for nproc %d > 1",size);
207   /* size==1 or distributed mat input */
208   if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
209     /* the communication pattern of SOLVEstruct was likely not created for a matmatsolve,
210        so destroy it and let a new SOLVEstruct be created; otherwise memory corruption
211        or an incorrect solution may result.
212        See src/mat/examples/tests/ex125.c */
213 #if defined(PETSC_USE_COMPLEX)
214     PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
215 #else
216     PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
217 #endif
218     lu->options.SolveInitialized = NO;
219   }
220   ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
221 
222   ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);
223 
224   PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
225   ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);
226   if (lu->MatInputMode == GLOBAL) { /* size == 1 */
227 #if defined(PETSC_USE_COMPLEX)
228     PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
229 #else
230     PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
231 #endif
232   } else { /* distributed mat input */
233 #if defined(PETSC_USE_COMPLEX)
234     PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
235 #else
236     PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
237 #endif
238   }
239   if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx/pzgssvx failed, info: %d\n",info);
240   ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);
241 
242   if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
243   PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
244   lu->matsolve_iscalled    = PETSC_FALSE;
245   lu->matmatsolve_iscalled = PETSC_TRUE;
246   PetscFunctionReturn(0);
247 }
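/*
   A minimal sketch (not part of this file; error checking omitted; comm is the matrix
   communicator and m/M/nrhs are as in this routine) of solving several right-hand
   sides at once with dense B and X:

       Mat B,X;
       MatCreateDense(comm,m,PETSC_DECIDE,M,nrhs,NULL,&B);
       MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);
       // ... fill B with the right-hand sides ...
       MatMatSolve(F,B,X);   // dispatches here when F holds a SuperLU_DIST factorization
*/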
248 
249 
250 #undef __FUNCT__
251 #define __FUNCT__ "MatLUFactorNumeric_SuperLU_DIST"
252 PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
253 {
254   Mat              *tseq,A_seq = NULL;
255   Mat_SeqAIJ       *aa,*bb;
256   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
257   PetscErrorCode   ierr;
258   PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
259                    m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
260   int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
261   PetscMPIInt      size;
262   SuperLUStat_t    stat;
263   double           *berr=0;
264   IS               isrow;
265   PetscLogDouble   time0,time,time_min,time_max;
266   Mat              F_diag=NULL;
267 #if defined(PETSC_USE_COMPLEX)
268   doublecomplex    *av, *bv;
269 #else
270   double           *av, *bv;
271 #endif
272 
273   PetscFunctionBegin;
274   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
275 
276   if (lu->options.PrintStat) { /* collect time for mat conversion */
277     ierr = MPI_Barrier(PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
278     ierr = PetscTime(&time0);CHKERRQ(ierr);
279   }
280 
281   if (lu->MatInputMode == GLOBAL) { /* global mat input */
282     if (size > 1) { /* convert mpi A to seq mat A */
283       ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
284       ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
285       ierr = ISDestroy(&isrow);CHKERRQ(ierr);
286 
287       A_seq = *tseq;
288       ierr  = PetscFree(tseq);CHKERRQ(ierr);
289       aa    = (Mat_SeqAIJ*)A_seq->data;
290     } else {
291       PetscBool flg;
292       ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
293       if (flg) {
294         Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
295         A = At->A;
296       }
297       aa =  (Mat_SeqAIJ*)A->data;
298     }
299 
300     /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
301        Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
302     if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */
303       PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
304       if (lu->FactPattern == SamePattern_SameRowPerm) {
305         lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
306       } else { /* lu->FactPattern == SamePattern */
307         PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
308         lu->options.Fact = SamePattern;
309       }
310     }
311 #if defined(PETSC_USE_COMPLEX)
312     PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row));
313 #else
314     PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row));
315 #endif
316 
317     /* Create compressed column matrix A_sup. */
318 #if defined(PETSC_USE_COMPLEX)
319     PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
320 #else
321     PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
322 #endif
323   } else { /* distributed mat input */
324     Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
325     aa=(Mat_SeqAIJ*)(mat->A)->data;
326     bb=(Mat_SeqAIJ*)(mat->B)->data;
327     ai=aa->i; aj=aa->j;
328     bi=bb->i; bj=bb->j;
329 #if defined(PETSC_USE_COMPLEX)
330     av=(doublecomplex*)aa->a;
331     bv=(doublecomplex*)bb->a;
332 #else
333     av=aa->a;
334     bv=bb->a;
335 #endif
336     rstart = A->rmap->rstart;
337     nz     = aa->nz + bb->nz;
338     garray = mat->garray;
339 
340     if (lu->options.Fact == DOFACT) { /* first numeric factorization */
341 #if defined(PETSC_USE_COMPLEX)
342       PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
343 #else
344       PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
345 #endif
346     } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
347       /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to a crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
348       if (lu->FactPattern == SamePattern_SameRowPerm) {
349         lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
350       } else {
351         PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
352         lu->options.Fact = SamePattern;
353       }
354     }
355     nz = 0;
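    /* Merge the diagonal block (A, local columns) and the off-diagonal block (B, global
       columns via garray) row by row into one local CSR structure (lu->row, lu->col,
       lu->val) with ascending global column indices: first the B entries whose global
       column precedes the first A entry of the row, then the A entries, then the rest of B. */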
356     for (i=0; i<m; i++) {
357       lu->row[i] = nz;
358       countA     = ai[i+1] - ai[i];
359       countB     = bi[i+1] - bi[i];
360       ajj        = aj + ai[i]; /* ptr to the beginning of this row */
361       bjj        = bj + bi[i];
362 
363       /* B part, smaller col index */
364       colA_start = rstart + ajj[0]; /* the smallest global col index of A */
365       jB         = 0;
366       for (j=0; j<countB; j++) {
367         jcol = garray[bjj[j]];
368         if (jcol > colA_start) {
369           jB = j;
370           break;
371         }
372         lu->col[nz]   = jcol;
373         lu->val[nz++] = *bv++;
374         if (j==countB-1) jB = countB;
375       }
376 
377       /* A part */
378       for (j=0; j<countA; j++) {
379         lu->col[nz]   = rstart + ajj[j];
380         lu->val[nz++] = *av++;
381       }
382 
383       /* B part, larger col index */
384       for (j=jB; j<countB; j++) {
385         lu->col[nz]   = garray[bjj[j]];
386         lu->val[nz++] = *bv++;
387       }
388     }
389     lu->row[m] = nz;
390 #if defined(PETSC_USE_COMPLEX)
391     PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
392 #else
393     PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
394 #endif
395   }
396   if (lu->options.PrintStat) {
397     ierr  = PetscTime(&time);CHKERRQ(ierr);
398     time0 = time - time0;
399   }
400 
401   /* Factor the matrix. */
402   PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
403   if (lu->MatInputMode == GLOBAL) { /* global mat input */
404 #if defined(PETSC_USE_COMPLEX)
405     PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
406 #else
407     PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
408 #endif
409   } else { /* distributed mat input */
410 #if defined(PETSC_USE_COMPLEX)
411     PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
412     if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
413 #else
414     PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
415     if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
416 #endif
417   }
418 
419   if (lu->MatInputMode == GLOBAL && size > 1) {
420     ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
421   }
422 
423   if (lu->options.PrintStat) {
424     ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
425     ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
426     ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
427     time = time/size; /* average time */
428     ierr = PetscPrintf(PetscObjectComm((PetscObject)A), "        Mat conversion(PETSc->SuperLU_DIST) time (max/min/avg): \n                              %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
429     PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
430   }
431   PStatFree(&stat);
432   if (size > 1) {
433     F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
434     F_diag->assembled = PETSC_TRUE;
435   }
436   (F)->assembled    = PETSC_TRUE;
437   (F)->preallocated = PETSC_TRUE;
438   lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
439   PetscFunctionReturn(0);
440 }
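/*
   A minimal sketch (not part of this file) of the repeated-factorization path handled
   above: when only the numerical values of A change (same nonzero pattern), the
   application calls MatLUFactorNumeric() again and lu->FactPattern controls how much
   of the previous factorization SuperLU_DIST reuses:

       for (step=0; step<nsteps; step++) {
         // ... update the values of A, keeping its nonzero pattern ...
         MatLUFactorNumeric(F,A,&info);   // takes the SamePattern / SamePattern_SameRowPerm branch
         MatSolve(F,b,x);
       }
*/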
441 
442 /* Note the Petsc r and c permutations are ignored */
443 #undef __FUNCT__
444 #define __FUNCT__ "MatLUFactorSymbolic_SuperLU_DIST"
445 PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
446 {
447   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
448   PetscInt         M   = A->rmap->N,N=A->cmap->N;
449 
450   PetscFunctionBegin;
451   /* Initialize the SuperLU process grid. */
452   PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));
453 
454   /* Initialize ScalePermstruct and LUstruct. */
455   PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
456   PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(M, N, &lu->LUstruct));
457   F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
458   F->ops->solve           = MatSolve_SuperLU_DIST;
459   F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
460   lu->CleanUpSuperLU_Dist = PETSC_TRUE;
461   PetscFunctionReturn(0);
462 }
463 
464 #undef __FUNCT__
465 #define __FUNCT__ "MatFactorGetSolverPackage_aij_superlu_dist"
466 PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
467 {
468   PetscFunctionBegin;
469   *type = MATSOLVERSUPERLU_DIST;
470   PetscFunctionReturn(0);
471 }
472 
473 #undef __FUNCT__
474 #define __FUNCT__ "MatGetFactor_aij_superlu_dist"
475 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
476 {
477   Mat               B;
478   Mat_SuperLU_DIST  *lu;
479   PetscErrorCode    ierr;
480   PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
481   PetscMPIInt       size;
482   superlu_options_t options;
483   PetscBool         flg;
484   const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
485   const char        *rowperm[]     = {"LargeDiag","NATURAL"};
486   const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};
487 
488   PetscFunctionBegin;
489   /* Create the factorization matrix */
490   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
491   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);CHKERRQ(ierr);
492   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
493   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
494   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
495 
496   B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
497   B->ops->view             = MatView_SuperLU_DIST;
498   B->ops->destroy          = MatDestroy_SuperLU_DIST;
499 
500   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);CHKERRQ(ierr);
501 
502   B->factortype = MAT_FACTOR_LU;
503 
504   ierr     = PetscNewLog(B,Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);
505   B->spptr = lu;
506 
507   /* Set the default input options:
508      options.Fact              = DOFACT;
509      options.Equil             = YES;
510      options.ParSymbFact       = NO;
511      options.ColPerm           = METIS_AT_PLUS_A;
512      options.RowPerm           = LargeDiag;
513      options.ReplaceTinyPivot  = YES;
514      options.IterRefine        = DOUBLE;
515      options.Trans             = NOTRANS;
516      options.SolveInitialized  = NO; (holds the communication pattern used by MatSolve() and MatMatSolve())
517      options.RefineInitialized = NO;
518      options.PrintStat         = YES;
519   */
520   set_default_options_dist(&options);
521 
522   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));CHKERRQ(ierr);
523   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
524   /* Default process grid: choose npcol nearest to sqrt(size), then decrease it until size = nprow*npcol exactly */
525   ierr = PetscMPIIntCast((PetscInt)(0.5 + PetscSqrtReal((PetscReal)size)),&lu->npcol);CHKERRQ(ierr);
526   if (!lu->npcol) lu->npcol = 1;
527   while (lu->npcol > 0) {
528     ierr = PetscMPIIntCast(size/lu->npcol,&lu->nprow);CHKERRQ(ierr);
529     if (size == lu->nprow * lu->npcol) break;
530     lu->npcol--;
531   }
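  /* Worked example of the loop above: size=6 gives npcol=2, nprow=3; a prime size falls back to npcol=1, nprow=size */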
532 
533   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
534   ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,&lu->nprow,NULL);CHKERRQ(ierr);
535   ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,&lu->npcol,NULL);CHKERRQ(ierr);
536   if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);
537 
538   lu->MatInputMode = DISTRIBUTED;
539 
540   ierr = PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);CHKERRQ(ierr);
541   if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;
542 
543   ierr = PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
544   if (!flg) options.Equil = NO;
545 
546   ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);CHKERRQ(ierr);
547   if (flg) {
548     switch (indx) {
549     case 0:
550       options.RowPerm = LargeDiag;
551       break;
552     case 1:
553       options.RowPerm = NOROWPERM;
554       break;
555     }
556   }
557 
558   ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);CHKERRQ(ierr);
559   if (flg) {
560     switch (indx) {
561     case 0:
562       options.ColPerm = NATURAL;
563       break;
564     case 1:
565       options.ColPerm = MMD_AT_PLUS_A;
566       break;
567     case 2:
568       options.ColPerm = MMD_ATA;
569       break;
570     case 3:
571       options.ColPerm = METIS_AT_PLUS_A;
572       break;
573     case 4:
574       options.ColPerm = PARMETIS;   /* only works for np>1 */
575       break;
576     default:
577       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
578     }
579   }
580 
581   ierr = PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
582   if (!flg) options.ReplaceTinyPivot = NO;
583 
584   options.ParSymbFact = NO;
585 
586   ierr = PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
587   if (flg) {
588 #if defined(PETSC_HAVE_PARMETIS)
589     options.ParSymbFact = YES;
590     options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
591 #else
592     ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"Warning: parallel symbolic factorization requires ParMETIS; ignoring this option\n");CHKERRQ(ierr);
593 #endif
594   }
595 
596   lu->FactPattern = SamePattern_SameRowPerm;
597 
598   ierr = PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);CHKERRQ(ierr);
599   if (flg) {
600     switch (indx) {
601     case 0:
602       lu->FactPattern = SamePattern;
603       break;
604     case 1:
605       lu->FactPattern = SamePattern_SameRowPerm;
606       break;
607     }
608   }
609 
610   options.IterRefine = NOREFINE;
611   ierr               = PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
612   if (flg) options.IterRefine = SLU_DOUBLE;
613 
614   if (PetscLogPrintInfo) options.PrintStat = YES;
615   else options.PrintStat = NO;
616   ierr = PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",
617                           (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);CHKERRQ(ierr);
618   PetscOptionsEnd();
619 
620   lu->options              = options;
621   lu->options.Fact         = DOFACT;
622   lu->matsolve_iscalled    = PETSC_FALSE;
623   lu->matmatsolve_iscalled = PETSC_FALSE;
624 
625   *F = B;
626   PetscFunctionReturn(0);
627 }
628 
629 #undef __FUNCT__
630 #define __FUNCT__ "MatGetFactor_seqaij_superlu_dist"
631 PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
632 {
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
637   PetscFunctionReturn(0);
638 }
639 
640 #undef __FUNCT__
641 #define __FUNCT__ "MatGetFactor_mpiaij_superlu_dist"
642 PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
643 {
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   ierr = MatGetFactor_aij_superlu_dist(A,ftype,F);CHKERRQ(ierr);
648   PetscFunctionReturn(0);
649 }
650 
651 #undef __FUNCT__
652 #define __FUNCT__ "MatFactorInfo_SuperLU_DIST"
653 PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
654 {
655   Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
656   superlu_options_t options;
657   PetscErrorCode    ierr;
658 
659   PetscFunctionBegin;
660   /* check if matrix is superlu_dist type */
661   if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);
662 
663   options = lu->options;
664   ierr    = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
665   ierr    = PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
666   ierr    = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);CHKERRQ(ierr);
667   ierr    = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
668   ierr    = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
669   ierr    = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);CHKERRQ(ierr);
670   ierr    = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
671   ierr    = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);
672 
673   switch (options.ColPerm) {
674   case NATURAL:
675     ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
676     break;
677   case MMD_AT_PLUS_A:
678     ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
679     break;
680   case MMD_ATA:
681     ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
682     break;
683   case METIS_AT_PLUS_A:
684     ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");CHKERRQ(ierr);
685     break;
686   case PARMETIS:
687     ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");CHKERRQ(ierr);
688     break;
689   default:
690     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
691   }
692 
693   ierr = PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);CHKERRQ(ierr);
694 
695   if (lu->FactPattern == SamePattern) {
696     ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");CHKERRQ(ierr);
697   } else {
698     ierr = PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");CHKERRQ(ierr);
699   }
700   PetscFunctionReturn(0);
701 }
702 
703 #undef __FUNCT__
704 #define __FUNCT__ "MatView_SuperLU_DIST"
705 PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
706 {
707   PetscErrorCode    ierr;
708   PetscBool         iascii;
709   PetscViewerFormat format;
710 
711   PetscFunctionBegin;
712   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
713   if (iascii) {
714     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
715     if (format == PETSC_VIEWER_ASCII_INFO) {
716       ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
717     }
718   }
719   PetscFunctionReturn(0);
720 }
721 
722 
723 /*MC
724   MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization
725 
726    Works with AIJ matrices
727 
728   Options Database Keys:
729 + -mat_superlu_dist_r <n> - number of rows in processor partition
730 . -mat_superlu_dist_c <n> - number of columns in processor partition
731 . -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed
732 . -mat_superlu_dist_equil - equilibrate the matrix
733 . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
734 . -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
735 . -mat_superlu_dist_replacetinypivot - replace tiny pivots
736 . -mat_superlu_dist_fact <SamePattern_SameRowPerm> - (choose one of) SamePattern SamePattern_SameRowPerm
737 . -mat_superlu_dist_iterrefine - use iterative refinement
738 - -mat_superlu_dist_statprint - print factorization information
739 
740    Level: beginner
741 
742 .seealso: PCLU
743 
744 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
745 
746 M*/
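/*
   A minimal command-line sketch (assumed typical usage for this PETSc version, not
   taken from this file):

       mpiexec -n 4 ./myapp -ksp_type preonly -pc_type lu \
         -pc_factor_mat_solver_package superlu_dist \
         -mat_superlu_dist_colperm PARMETIS -mat_superlu_dist_statprint

   or, from code, PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST).
*/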
747 
748