xref: /petsc/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c (revision d34347eca8ac9657cccc548b3ba1c8201ddea9cf)
1 /*$Id: superlu_DIST.c,v 1.10 2001/08/15 15:56:50 bsmith Exp $*/
2 /*
3         Provides an interface to the SuperLU_DIST_2.0 sparse solver
4 */
5 
6 #include "src/mat/impls/aij/seq/aij.h"
7 #include "src/mat/impls/aij/mpi/mpiaij.h"
8 #if defined(PETSC_HAVE_STDLIB_H) /* This is to get arround weird problem with SuperLU on cray */
9 #include "stdlib.h"
10 #endif
11 
12 /*MC
13   MATSUPERLU_DIST - a matrix type providing direct solvers for parallel matrices
14   via the external package SuperLU_DIST.
15 
16   If SuperLU_DIST is installed (see the manual for
17   instructions on how to declare the existence of external packages),
18   a matrix type can be constructed which invokes SuperLU_DIST solvers.
19   After calling MatCreate(...,A), simply call MatSetType(A,MATSUPERLU_DIST).
20   This matrix type is only supported for double precision real.
21 
22   This matrix inherits from MATSEQAIJ when constructed with a single process communicator,
23   and from MATMPIAIJ otherwise.
24 
25   Options Database Keys:
26 + -mat_type superlu_dist
27 . -mat_superlu_dist_r <n> : number of rows in processor partition
28 . -mat_superlu_dist_c <n> : number of columns in processor partition
29 . -mat_superlu_dist_matinput 0|1 : matrix input mode; 0=global, 1=distributed
30 . -mat_superlu_dist_equil : equilibrate the matrix
31 . -mat_superlu_dist_rowperm LargeDiag|NATURAL : row permutation
32 . -mat_superlu_dist_colperm MMD_AT_PLUS_A|MMD_ATA|COLAMD|NATURAL : column permutation
33 . -mat_superlu_dist_replacetinypivot : replace tiny pivots
34 . -mat_superlu_dist_iterrefine : use iterative refinement
35 - -mat_superlu_dist_statprint : print factorization information
36 
37 .seealso: PCLU
38 M*/
39 
40 EXTERN_C_BEGIN
41 #if defined(PETSC_USE_COMPLEX)
42 #include "superlu_zdefs.h"
43 #else
44 #include "superlu_ddefs.h"
45 #endif
46 EXTERN_C_END
47 
/* How the matrix is handed to SuperLU_DIST: one replicated global matrix
   (GLOBAL) or SuperLU_DIST's distributed compressed-row format (DISTRIBUTED). */
typedef enum { GLOBAL,DISTRIBUTED
} SuperLU_MatInputMode;

typedef struct {
  int_t                   nprow,npcol,*row,*col;  /* process grid dimensions; row/col index arrays passed to SuperLU_DIST */
  gridinfo_t              grid;                   /* SuperLU_DIST process grid */
  superlu_options_t       options;                /* SuperLU_DIST solver options */
  SuperMatrix             A_sup;                  /* the matrix in SuperLU_DIST format (SLU_NC or SLU_NR_loc) */
  ScalePermstruct_t       ScalePermstruct;        /* row/column scalings and permutations */
  LUstruct_t              LUstruct;               /* the distributed L and U factors */
  int                     StatPrint;              /* nonzero => print factorization/solve statistics */
  int                     MatInputMode;           /* a SuperLU_MatInputMode value: GLOBAL or DISTRIBUTED */
  SOLVEstruct_t           SOLVEstruct;            /* communication structure for the distributed triangular solves */
  MatStructure            flg;                    /* DIFFERENT_NONZERO_PATTERN on first numeric factorization, SAME_NONZERO_PATTERN after */
  MPI_Comm                comm_superlu;           /* private duplicate of the matrix communicator given to SuperLU_DIST */
#if defined(PETSC_USE_COMPLEX)
  doublecomplex           *val;                   /* numerical values handed to SuperLU_DIST */
#else
  double                  *val;                   /* numerical values handed to SuperLU_DIST */
#endif

  MatType basetype;  /* PETSc type (MATSEQAIJ or MATMPIAIJ) this matrix inherits from */
  /* A few function pointers for inheritance: the base type's methods, saved
     so they can be invoked and restored by the convert/destroy routines. */
  int (*MatView)(Mat,PetscViewer);
  int (*MatAssemblyEnd)(Mat,MatAssemblyType);
  int (*MatLUFactorSymbolic)(Mat,IS,IS,MatFactorInfo*,Mat*);
  int (*MatDestroy)(Mat);

  /* Flag to clean up (non-global) SuperLU objects during Destroy */
  PetscTruth CleanUpSuperLUDist;
} Mat_MPIAIJ_SuperLU_DIST;
79 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvert_SuperLU_DIST_Base"
/*
   Converts a MATSUPERLU_DIST matrix back to its base AIJ type.  Out of place
   (B != A) it simply duplicates A; in place it restores the base type's
   function pointers saved by MatConvert_Base_SuperLU_DIST() and frees the
   SuperLU_DIST context attached to spptr.
*/
int MatConvert_SuperLU_DIST_Base(Mat A,MatType type,Mat *newmat) {
  /* This routine is only called to convert an unfactored PETSc-SuperLU_DIST matrix */
  /* to its base PETSc type, so we will ignore 'MatType type'. */
  int                     ierr;
  Mat                     B=*newmat;
  Mat_MPIAIJ_SuperLU_DIST *lu=(Mat_MPIAIJ_SuperLU_DIST *)A->spptr;

  PetscFunctionBegin;
  if (B != A) {
    /* This routine was inherited from SeqAIJ. */
    ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
  } else {
    /* Reset the original function pointers (saved in lu by
       MatConvert_Base_SuperLU_DIST), then free the context. */
    B->ops->view             = lu->MatView;
    B->ops->assemblyend      = lu->MatAssemblyEnd;
    B->ops->lufactorsymbolic = lu->MatLUFactorSymbolic;
    B->ops->destroy          = lu->MatDestroy;

    ierr = PetscObjectChangeTypeName((PetscObject)B,lu->basetype);CHKERRQ(ierr);
    ierr = PetscFree(lu);CHKERRQ(ierr);  /* A->spptr is dangling after this; callers must not touch lu again */
  }
  *newmat = B;
  PetscFunctionReturn(0);
}
EXTERN_C_END
108 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ_SuperLU_DIST"
/*
   Destroys a MATSUPERLU_DIST matrix: releases the SuperLU_DIST factors,
   process grid and private communicator (only if this object owns them,
   i.e. CleanUpSuperLUDist is set by the symbolic factorization), converts
   the matrix back to its base AIJ type, and calls that type's destroy.
*/
int MatDestroy_MPIAIJ_SuperLU_DIST(Mat A)
{
  int                     ierr;
  Mat_MPIAIJ_SuperLU_DIST *lu = (Mat_MPIAIJ_SuperLU_DIST*)A->spptr;

  PetscFunctionBegin;
  if (lu->CleanUpSuperLUDist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      /* The solve structure only exists if a solve was performed. */
      if ( lu->options.SolveInitialized ) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }

  /* Restore the base type's methods and free lu (done inside the convert
     routine), then destroy through the base type's destroy method. */
  ierr = MatConvert_SuperLU_DIST_Base(A,lu->basetype,&A);CHKERRQ(ierr);
  ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
146 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_MPIAIJ_SuperLU_DIST"
/*
   Solves A*x = b_mpi using the SuperLU_DIST factors stored in A->spptr.
   In GLOBAL input mode with size > 1 the right-hand side is gathered into a
   sequential vector, solved redundantly on every process with
   p[dz]gssvx_ABglobal, and scattered back into x.  In DISTRIBUTED mode (and
   for a single process) the solve overwrites x in place via p[dz]gssvx.
*/
int MatSolve_MPIAIJ_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_MPIAIJ              *aa = (Mat_MPIAIJ*)A->data;
  Mat_MPIAIJ_SuperLU_DIST *lu = (Mat_MPIAIJ_SuperLU_DIST*)A->spptr;
  int                     ierr, size=aa->size;
  int                     m=A->M, N=A->N;   /* global row/column counts */
  SuperLUStat_t           stat;
  double                  berr[1];          /* backward error reported by SuperLU_DIST */
  PetscScalar             *bptr;            /* raw array holding rhs on entry, solution on exit */
  int                     info, nrhs=1;
  Vec                     x_seq;            /* sequential work vector, GLOBAL mode with size>1 only */
  IS                      iden;
  VecScatter              scat;
  PetscLogDouble          time0,time,time_min,time_max;

  PetscFunctionBegin;
  if (size > 1) {
    if (lu->MatInputMode == GLOBAL) { /* global mat input, convert b to x_seq */
      ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
      ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
      ierr = ISDestroy(iden);CHKERRQ(ierr);

      ierr = VecScatterBegin(b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
    } else { /* distributed mat input: solve in place on x */
      ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
      ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
    }
  } else { /* size == 1 */
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only.*/

  PStatInit(&stat);        /* Initialize the statistics variables. */
  if (lu->StatPrint) {
    ierr = MPI_Barrier(A->comm);CHKERRQ(ierr); /* to be removed */
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);  /* to be removed */
  }
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, m, nrhs,
                   &lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, m, nrhs,
                   &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
    /* NOTE(review): A->M (the GLOBAL row count) is passed as the rhs leading
       dimension here; SuperLU_DIST documents ldb for p[dz]gssvx as the LOCAL
       number of rows -- confirm against the SuperLU_DIST 2.0 user guide. */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, (doublecomplex*)bptr, A->M, nrhs, &lu->grid,
	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
    if (info) SETERRQ1(1,"pzgssvx fails, info: %d\n",info);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, bptr, A->M, nrhs, &lu->grid,
	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
    if (info) SETERRQ1(1,"pdgssvx fails, info: %d\n",info);
#endif
  }
  if (lu->StatPrint) {
    ierr = PetscGetTime(&time);CHKERRQ(ierr);  /* to be removed */
     PStatPrint(&lu->options, &stat, &lu->grid);     /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1) {
    if (lu->MatInputMode == GLOBAL){ /* convert seq x to mpi x */
      ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
      ierr = VecScatterBegin(x_seq,x,INSERT_VALUES,SCATTER_REVERSE,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x_seq,x,INSERT_VALUES,SCATTER_REVERSE,scat);CHKERRQ(ierr);
      ierr = VecScatterDestroy(scat);CHKERRQ(ierr);
      ierr = VecDestroy(x_seq);CHKERRQ(ierr);
    } else {
      ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
    }
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
  }
  /* Reduce and report min/max/average solve time; results are only
     significant on rank 0, where PetscPrintf emits them. */
  if (lu->StatPrint) {
    time0 = time - time0;
    ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,A->comm);CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,A->comm);CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,A->comm);CHKERRQ(ierr);
    time = time/size; /* average time */
    ierr = PetscPrintf(A->comm, "  Time for superlu_dist solve (max/min/avg): %g / %g / %g\n\n",time_max,time_min,time);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
239 
240 #undef __FUNCT__
241 #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ_SuperLU_DIST"
242 int MatLUFactorNumeric_MPIAIJ_SuperLU_DIST(Mat A,Mat *F)
243 {
244   Mat_MPIAIJ              *fac = (Mat_MPIAIJ*)(*F)->data,*mat;
245   Mat                     *tseq,A_seq = PETSC_NULL;
246   Mat_SeqAIJ              *aa,*bb;
247   Mat_MPIAIJ_SuperLU_DIST *lu = (Mat_MPIAIJ_SuperLU_DIST*)(*F)->spptr;
248   int                     M=A->M,N=A->N,info,ierr,size=fac->size,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
249                           m=A->m, irow,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
250   SuperLUStat_t           stat;
251   double                  *berr=0;
252   IS                      isrow;
253   PetscLogDouble          time0[2],time[2],time_min[2],time_max[2];
254 #if defined(PETSC_USE_COMPLEX)
255   doublecomplex           *av, *bv;
256 #else
257   double                  *av, *bv;
258 #endif
259 
260   PetscFunctionBegin;
261   if (lu->StatPrint) {
262     ierr = MPI_Barrier(A->comm);CHKERRQ(ierr);
263     ierr = PetscGetTime(&time0[0]);CHKERRQ(ierr);
264   }
265 
266   if (lu->MatInputMode == GLOBAL) { /* global mat input */
267     if (size > 1) { /* convert mpi A to seq mat A */
268       ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow); CHKERRQ(ierr);
269       ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq); CHKERRQ(ierr);
270       ierr = ISDestroy(isrow);CHKERRQ(ierr);
271 
272       A_seq = *tseq;
273       ierr = PetscFree(tseq);CHKERRQ(ierr);
274       aa =  (Mat_SeqAIJ*)A_seq->data;
275     } else {
276       aa =  (Mat_SeqAIJ*)A->data;
277     }
278 
279     /* Allocate storage, then convert Petsc NR matrix to SuperLU_DIST NC */
280     if (lu->flg == DIFFERENT_NONZERO_PATTERN) {/* first numeric factorization */
281 #if defined(PETSC_USE_COMPLEX)
282       zallocateA_dist(N, aa->nz, &lu->val, &lu->col, &lu->row);
283 #else
284       dallocateA_dist(N, aa->nz, &lu->val, &lu->col, &lu->row);
285 #endif
286     } else { /* successive numeric factorization, sparsity pattern is reused. */
287       Destroy_CompCol_Matrix_dist(&lu->A_sup);
288       Destroy_LU(N, &lu->grid, &lu->LUstruct);
289       lu->options.Fact = SamePattern;
290     }
291 #if defined(PETSC_USE_COMPLEX)
292     zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row);
293 #else
294     dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row);
295 #endif
296 
297     /* Create compressed column matrix A_sup. */
298 #if defined(PETSC_USE_COMPLEX)
299     zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE);
300 #else
301     dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE);
302 #endif
303   } else { /* distributed mat input */
304     mat =  (Mat_MPIAIJ*)A->data;
305     aa=(Mat_SeqAIJ*)(mat->A)->data;
306     bb=(Mat_SeqAIJ*)(mat->B)->data;
307     ai=aa->i; aj=aa->j;
308     bi=bb->i; bj=bb->j;
309 #if defined(PETSC_USE_COMPLEX)
310     av=(doublecomplex*)aa->a;
311     bv=(doublecomplex*)bb->a;
312 #else
313     av=aa->a;
314     bv=bb->a;
315 #endif
316     rstart = mat->rstart;
317     nz     = aa->nz + bb->nz;
318     garray = mat->garray;
319     rstart = mat->rstart;
320 
321     if (lu->flg == DIFFERENT_NONZERO_PATTERN) {/* first numeric factorization */
322 #if defined(PETSC_USE_COMPLEX)
323       zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
324 #else
325       dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
326 #endif
327     } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
328       /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);  */ /* crash! */
329       Destroy_LU(N, &lu->grid, &lu->LUstruct);
330       lu->options.Fact = SamePattern;
331     }
332     nz = 0; jB = 0; irow = mat->rstart;
333     for ( i=0; i<m; i++ ) {
334       lu->row[i] = nz;
335       countA = ai[i+1] - ai[i];
336       countB = bi[i+1] - bi[i];
337       ajj = aj + ai[i];  /* ptr to the beginning of this row */
338       bjj = bj + bi[i];
339 
340       /* B part, smaller col index */
341       colA_start = mat->rstart + ajj[0]; /* the smallest global col index of A */
342       for (j=0; j<countB; j++){
343         jcol = garray[bjj[j]];
344         if (jcol > colA_start) {
345           jB = j;
346           break;
347         }
348         lu->col[nz] = jcol;
349         lu->val[nz++] = *bv++;
350         if (j==countB-1) jB = countB;
351       }
352 
353       /* A part */
354       for (j=0; j<countA; j++){
355         lu->col[nz] = mat->rstart + ajj[j];
356         lu->val[nz++] = *av++;
357       }
358 
359       /* B part, larger col index */
360       for (j=jB; j<countB; j++){
361         lu->col[nz] = garray[bjj[j]];
362         lu->val[nz++] = *bv++;
363       }
364     }
365     lu->row[m] = nz;
366 #if defined(PETSC_USE_COMPLEX)
367     zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
368 				   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE);
369 #else
370     dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
371 				   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE);
372 #endif
373   }
374   if (lu->StatPrint) {
375     ierr = PetscGetTime(&time[0]);CHKERRQ(ierr);
376     time0[0] = time[0] - time0[0];
377   }
378 
379   /* Factor the matrix. */
380   PStatInit(&stat);   /* Initialize the statistics variables. */
381 
382   if (lu->StatPrint) {
383     ierr = MPI_Barrier(A->comm);CHKERRQ(ierr);
384     ierr = PetscGetTime(&time0[1]);CHKERRQ(ierr);
385   }
386 
387   if (lu->MatInputMode == GLOBAL) { /* global mat input */
388 #if defined(PETSC_USE_COMPLEX)
389     pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
390                    &lu->grid, &lu->LUstruct, berr, &stat, &info);
391 #else
392     pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
393                    &lu->grid, &lu->LUstruct, berr, &stat, &info);
394 #endif
395   } else { /* distributed mat input */
396 #if defined(PETSC_USE_COMPLEX)
397     pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid,
398 	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
399     if (info) SETERRQ1(1,"pzgssvx fails, info: %d\n",info);
400 #else
401     pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid,
402 	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
403     if (info) SETERRQ1(1,"pdgssvx fails, info: %d\n",info);
404 #endif
405   }
406   if (lu->StatPrint) {
407     ierr = PetscGetTime(&time[1]);CHKERRQ(ierr);  /* to be removed */
408     time0[1] = time[1] - time0[1];
409     if (lu->StatPrint) PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
410   }
411   PStatFree(&stat);
412 
413   if (lu->MatInputMode == GLOBAL && size > 1){
414     ierr = MatDestroy(A_seq);CHKERRQ(ierr);
415   }
416 
417   if (lu->StatPrint) {
418     ierr = MPI_Reduce(time0,time_max,2,MPI_DOUBLE,MPI_MAX,0,A->comm);
419     ierr = MPI_Reduce(time0,time_min,2,MPI_DOUBLE,MPI_MIN,0,A->comm);
420     ierr = MPI_Reduce(time0,time,2,MPI_DOUBLE,MPI_SUM,0,A->comm);
421     for (i=0; i<2; i++) time[i] = time[i]/size; /* average time */
422     ierr = PetscPrintf(A->comm, "  Time for mat conversion (max/min/avg):    %g / %g / %g\n",time_max[0],time_min[0],time[0]);
423     ierr = PetscPrintf(A->comm, "  Time for superlu_dist fact (max/min/avg): %g / %g / %g\n\n",time_max[1],time_min[1],time[1]);
424   }
425   (*F)->assembled = PETSC_TRUE;
426   lu->flg         = SAME_NONZERO_PATTERN;
427   PetscFunctionReturn(0);
428 }
429 
430 /* Note the Petsc r and c permutations are ignored */
431 #undef __FUNCT__
432 #define __FUNCT__ "MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST"
433 int MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST(Mat A,IS r,IS c,MatFactorInfo *info,Mat *F)
434 {
435   Mat                     B;
436   Mat_MPIAIJ_SuperLU_DIST *lu;
437   int                     ierr,M=A->M,N=A->N,size;
438   superlu_options_t       options;
439   char                    buff[32];
440   PetscTruth              flg;
441   char                    *ptype[] = {"MMD_AT_PLUS_A","NATURAL","MMD_ATA","COLAMD"};
442   char                    *prtype[] = {"LargeDiag","NATURAL"};
443   PetscFunctionBegin;
444 
445   /* Create the factorization matrix */
446   ierr = MatCreate(A->comm,A->m,A->n,M,N,&B);CHKERRQ(ierr);
447   ierr = MatSetType(B,MATSUPERLU_DIST);CHKERRQ(ierr);
448   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
449   ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
450 
451   B->ops->lufactornumeric  = MatLUFactorNumeric_MPIAIJ_SuperLU_DIST;
452   B->ops->solve            = MatSolve_MPIAIJ_SuperLU_DIST;
453   B->factor                = FACTOR_LU;
454 
455   lu = (Mat_MPIAIJ_SuperLU_DIST*)(B->spptr);
456 
457   /* Set the input options */
458   set_default_options(&options);
459   lu->MatInputMode = GLOBAL;
460   ierr = MPI_Comm_dup(A->comm,&(lu->comm_superlu));CHKERRQ(ierr);
461 
462   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
463   lu->nprow = size/2;               /* Default process rows.      */
464   if (lu->nprow == 0) lu->nprow = 1;
465   lu->npcol = size/lu->nprow;           /* Default process columns.   */
466 
467   ierr = PetscOptionsBegin(A->comm,A->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
468 
469     ierr = PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
470     ierr = PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
471     if (size != lu->nprow * lu->npcol) SETERRQ(1,"Number of processes should be equal to nprow*npcol");
472 
473     ierr = PetscOptionsInt("-mat_superlu_dist_matinput","Matrix input mode (0: GLOBAL; 1: DISTRIBUTED)","None",lu->MatInputMode,&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
474     if(lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;
475 
476     ierr = PetscOptionsLogical("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
477     if (!flg) {
478       options.Equil = NO;
479     }
480 
481     ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",prtype,2,prtype[0],buff,32,&flg);CHKERRQ(ierr);
482     while (flg) {
483       ierr = PetscStrcmp(buff,"LargeDiag",&flg);CHKERRQ(ierr);
484       if (flg) {
485         options.RowPerm = LargeDiag;
486         break;
487       }
488       ierr = PetscStrcmp(buff,"NATURAL",&flg);CHKERRQ(ierr);
489       if (flg) {
490         options.RowPerm = NOROWPERM;
491         break;
492       }
493       SETERRQ1(1,"Unknown row permutation %s",buff);
494     }
495 
496     ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",ptype,4,ptype[0],buff,32,&flg);CHKERRQ(ierr);
497     while (flg) {
498       ierr = PetscStrcmp(buff,"MMD_AT_PLUS_A",&flg);CHKERRQ(ierr);
499       if (flg) {
500         options.ColPerm = MMD_AT_PLUS_A;
501         break;
502       }
503       ierr = PetscStrcmp(buff,"NATURAL",&flg);CHKERRQ(ierr);
504       if (flg) {
505         options.ColPerm = NATURAL;
506         break;
507       }
508       ierr = PetscStrcmp(buff,"MMD_ATA",&flg);CHKERRQ(ierr);
509       if (flg) {
510         options.ColPerm = MMD_ATA;
511         break;
512       }
513       ierr = PetscStrcmp(buff,"COLAMD",&flg);CHKERRQ(ierr);
514       if (flg) {
515         options.ColPerm = COLAMD;
516         break;
517       }
518       SETERRQ1(1,"Unknown column permutation %s",buff);
519     }
520 
521     ierr = PetscOptionsLogical("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
522     if (!flg) {
523       options.ReplaceTinyPivot = NO;
524     }
525 
526     options.IterRefine = NOREFINE;
527     ierr = PetscOptionsLogical("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
528     if (flg) {
529       options.IterRefine = DOUBLE;
530     }
531 
532     if (PetscLogPrintInfo) {
533       lu->StatPrint = (int)PETSC_TRUE;
534     } else {
535       lu->StatPrint = (int)PETSC_FALSE;
536     }
537     ierr = PetscOptionsLogical("-mat_superlu_dist_statprint","Print factorization information","None",
538                               (PetscTruth)lu->StatPrint,(PetscTruth*)&lu->StatPrint,0);CHKERRQ(ierr);
539   PetscOptionsEnd();
540 
541   /* Initialize the SuperLU process grid. */
542   superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);
543 
544   /* Initialize ScalePermstruct and LUstruct. */
545   ScalePermstructInit(M, N, &lu->ScalePermstruct);
546   LUstructInit(M, N, &lu->LUstruct);
547 
548   lu->options            = options;
549   lu->flg                = DIFFERENT_NONZERO_PATTERN;
550   lu->CleanUpSuperLUDist = PETSC_TRUE;
551   *F = B;
552   PetscFunctionReturn(0);
553 }
554 
555 #undef __FUNCT__
556 #define __FUNCT__ "MatAssemblyEnd_MPIAIJ_SuperLU_DIST"
557 int MatAssemblyEnd_MPIAIJ_SuperLU_DIST(Mat A,MatAssemblyType mode) {
558   int                     ierr;
559   Mat_MPIAIJ_SuperLU_DIST *lu=(Mat_MPIAIJ_SuperLU_DIST*)(A->spptr);
560 
561   PetscFunctionBegin;
562   ierr = (*lu->MatAssemblyEnd)(A,mode);CHKERRQ(ierr);
563   lu->MatLUFactorSymbolic  = A->ops->lufactorsymbolic;
564   A->ops->lufactorsymbolic = MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST;
565   PetscFunctionReturn(0);
566 }
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "MatMPIAIJFactorInfo_SuperLu"
570 int MatMPIAIJFactorInfo_SuperLu(Mat A,PetscViewer viewer)
571 {
572   Mat_MPIAIJ_SuperLU_DIST *lu= (Mat_MPIAIJ_SuperLU_DIST*)A->spptr;
573   superlu_options_t       options;
574   int                     ierr;
575   char                    *colperm;
576 
577   PetscFunctionBegin;
578   /* check if matrix is superlu_dist type */
579   if (A->ops->solve != MatSolve_MPIAIJ_SuperLU_DIST) PetscFunctionReturn(0);
580 
581   options = lu->options;
582   ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
583   ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",(options.Equil != NO) ? "true": "false");CHKERRQ(ierr);
584   ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",(options.ReplaceTinyPivot != NO) ? "true": "false");CHKERRQ(ierr);
585   ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",(options.IterRefine == DOUBLE) ? "true": "false");CHKERRQ(ierr);
586   ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
587   ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL": "LargeDiag");CHKERRQ(ierr);
588   if (options.ColPerm == NATURAL) {
589     colperm = "NATURAL";
590   } else if (options.ColPerm == MMD_AT_PLUS_A) {
591     colperm = "MMD_AT_PLUS_A";
592   } else if (options.ColPerm == MMD_ATA) {
593     colperm = "MMD_ATA";
594   } else if (options.ColPerm == COLAMD) {
595     colperm = "COLAMD";
596   } else {
597     SETERRQ(1,"Unknown column permutation");
598   }
599   ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation %s \n",colperm);CHKERRQ(ierr);
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "MatView_MPIAIJ_Spooles_DIST"
605 int MatView_MPIAIJ_SuperLU_DIST(Mat A,PetscViewer viewer)
606 {
607   int                     ierr;
608   PetscTruth              isascii;
609   PetscViewerFormat       format;
610   Mat_MPIAIJ_SuperLU_DIST *lu=(Mat_MPIAIJ_SuperLU_DIST*)(A->spptr);
611 
612   PetscFunctionBegin;
613   ierr = (*lu->MatView)(A,viewer);CHKERRQ(ierr);
614 
615   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
616   if (isascii) {
617     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
618     if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
619       ierr = MatMPIAIJFactorInfo_SuperLu(A,viewer);CHKERRQ(ierr);
620     }
621   }
622   PetscFunctionReturn(0);
623 }
624 
625 
626 EXTERN_C_BEGIN
627 #undef __FUNCT__
628 #define __FUNCT__ "MatConvert_Base_SuperLU_DIST"
629 int MatConvert_Base_SuperLU_DIST(Mat A,MatType type,Mat *newmat) {
630   /* This routine is only called to convert to MATSUPERLU_DIST */
631   /* from MATSEQAIJ if A has a single process communicator */
632   /* or MATMPIAIJ otherwise, so we will ignore 'MatType type'. */
633   int                     ierr,size;
634   MPI_Comm                comm;
635   Mat                     B=*newmat;
636   Mat_MPIAIJ_SuperLU_DIST *lu;
637 
638   PetscFunctionBegin;
639   if (B != A) {
640     ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
641   }
642 
643   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
644   ierr = PetscNew(Mat_MPIAIJ_SuperLU_DIST,&lu);CHKERRQ(ierr);
645 
646   lu->MatView              = A->ops->view;
647   lu->MatAssemblyEnd       = A->ops->assemblyend;
648   lu->MatLUFactorSymbolic  = A->ops->lufactorsymbolic;
649   lu->MatDestroy           = A->ops->destroy;
650   lu->CleanUpSuperLUDist   = PETSC_FALSE;
651 
652   B->spptr                 = (void*)lu;
653   B->ops->view             = MatView_MPIAIJ_SuperLU_DIST;
654   B->ops->assemblyend      = MatAssemblyEnd_MPIAIJ_SuperLU_DIST;
655   B->ops->lufactorsymbolic = MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST;
656   B->ops->destroy          = MatDestroy_MPIAIJ_SuperLU_DIST;
657   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
658   if (size == 1) {
659     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqaij_superlu_dist_C",
660                                              "MatConvert_Base_SuperLU_DIST",MatConvert_Base_SuperLU_DIST);CHKERRQ(ierr);
661     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_superlu_dist_seqaij_C",
662                                              "MatConvert_SuperLU_DIST_Base",MatConvert_SuperLU_DIST_Base);CHKERRQ(ierr);
663   } else {
664     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_superlu_dist_C",
665                                              "MatConvert_Base_SuperLU_DIST",MatConvert_Base_SuperLU_DIST);CHKERRQ(ierr);
666     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_superlu_dist_mpiaij_C",
667                                              "MatConvert_SuperLU_DIST_Base",MatConvert_SuperLU_DIST_Base);CHKERRQ(ierr);
668   }
669   PetscLogInfo(0,"Using SuperLU_DIST for SeqAIJ LU factorization and solves.");
670   ierr = PetscObjectChangeTypeName((PetscObject)B,MATSUPERLU_DIST);CHKERRQ(ierr);
671   *newmat = B;
672   PetscFunctionReturn(0);
673 }
674 EXTERN_C_END
675 
676 EXTERN_C_BEGIN
677 #undef __FUNCT__
678 #define __FUNCT__ "MatCreate_MPIAIJ_SuperLU_DIST"
679 int MatCreate_MPIAIJ_SuperLU_DIST(Mat A) {
680   int                     ierr,size;
681   MPI_Comm                comm;
682 
683   PetscFunctionBegin;
684   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
685   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
686   if (size == 1) {
687     ierr = MatSetType(A,MATSEQAIJ);CHKERRQ(ierr);
688   } else {
689     ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
690   }
691   ierr = MatConvert_Base_SuperLU_DIST(A,MATSUPERLU_DIST,&A);CHKERRQ(ierr);
692   PetscFunctionReturn(0);
693 }
694 EXTERN_C_END
695 
696 EXTERN_C_BEGIN
697 #undef __FUNCT__
698 #define __FUNCT__ "MatLoad_MPIAIJ_SuperLU_DIST"
699 int MatLoad_MPIAIJ_SuperLU_DIST(PetscViewer viewer,MatType type,Mat *A) {
700   int      ierr,size,(*r)(PetscViewer,MatType,Mat*);
701   MPI_Comm comm;
702 
703   PetscFunctionBegin;
704   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
705   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
706   if (size == 1) {
707     ierr = PetscFListFind(comm,MatLoadList,MATSEQAIJ,(void(**)(void))&r);CHKERRQ(ierr);
708   } else {
709     ierr = PetscFListFind(comm,MatLoadList,MATMPIAIJ,(void(**)(void))&r);CHKERRQ(ierr);
710   }
711   ierr = (*r)(viewer,type,A);CHKERRQ(ierr);
712   PetscFunctionReturn(0);
713 }
714 EXTERN_C_END
715