xref: /petsc/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c (revision e8aa55a48f1e7a2d1b208d8f55db49fa3b2bfe2a)
1 /*$Id: superlu_DIST.c,v 1.10 2001/08/15 15:56:50 bsmith Exp $*/
2 /*
3         Provides an interface to the SuperLU_DIST_2.0 sparse solver
4 */
5 
6 #include "src/mat/impls/aij/seq/aij.h"
7 #include "src/mat/impls/aij/mpi/mpiaij.h"
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on cray */
9 #include "stdlib.h"
10 #endif
11 
12 EXTERN_C_BEGIN
13 #if defined(PETSC_USE_COMPLEX)
14 #include "superlu_zdefs.h"
15 #else
16 #include "superlu_ddefs.h"
17 #endif
18 EXTERN_C_END
19 
/*
   How the matrix is handed to SuperLU_DIST:
     GLOBAL      - the whole matrix is replicated on every process
     DISTRIBUTED - each process supplies only its local rows
*/
typedef enum {
  GLOBAL,
  DISTRIBUTED
} SuperLU_MatInputMode;
22 
/* Private data attached to Mat->spptr for the SuperLU_DIST interface. */
typedef struct {
  int_t                   nprow,npcol,*row,*col;  /* process-grid shape; row/col index arrays handed to SuperLU_DIST */
  gridinfo_t              grid;                   /* SuperLU_DIST process grid (superlu_gridinit/superlu_gridexit) */
  superlu_options_t       options;                /* SuperLU_DIST solver options */
  SuperMatrix             A_sup;                  /* matrix in SuperLU_DIST storage (SLU_NC global or SLU_NR_loc distributed) */
  ScalePermstruct_t       ScalePermstruct;        /* scaling and permutation data */
  LUstruct_t              LUstruct;               /* the computed L and U factors */
  int                     StatPrint;              /* nonzero => print SuperLU statistics and timings */
  int                     MatInputMode;           /* GLOBAL or DISTRIBUTED (SuperLU_MatInputMode) */
  SOLVEstruct_t           SOLVEstruct;            /* solve data used only by the distributed (p[dz]gssvx) path */
  MatStructure            flg;                    /* DIFFERENT_/SAME_NONZERO_PATTERN across repeat factorizations */
  MPI_Comm                comm_superlu;           /* duplicated communicator on which the grid lives; freed in Destroy */
#if defined(PETSC_USE_COMPLEX)
  doublecomplex           *val;                   /* numerical values passed to SuperLU_DIST */
#else
  double                  *val;                   /* numerical values passed to SuperLU_DIST */
#endif

  /* A few function pointers for inheritance: the original AIJ ops
     saved in MatCreate_MPIAIJ_SuperLU_DIST so they can be chained to */
  int (*MatView)(Mat,PetscViewer);
  int (*MatAssemblyEnd)(Mat,MatAssemblyType);
  int (*MatDestroy)(Mat);

  /* Flag to clean up (non-global) SuperLU objects during Destroy;
     set PETSC_TRUE only once the symbolic factorization created them */
  PetscTruth CleanUpSuperLUDist;
} Mat_MPIAIJ_SuperLU_DIST;
49 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ_SuperLU_DIST"
/*
   Frees the SuperLU_DIST objects owned by this matrix (if any were
   created), then restores and invokes the inherited MPIAIJ destroy
   routine saved in lu->MatDestroy.
*/
int MatDestroy_MPIAIJ_SuperLU_DIST(Mat A)
{
  Mat_MPIAIJ              *a  = (Mat_MPIAIJ*)A->data;
  Mat_MPIAIJ_SuperLU_DIST *lu = (Mat_MPIAIJ_SuperLU_DIST*)A->spptr;
  /* NOTE(review): size is assigned but never used in this routine */
  int                     ierr, size=a->size,(*destroy)(Mat);

  PetscFunctionBegin;
  if (lu->CleanUpSuperLUDist) {
    /* Deallocate SuperLU_DIST storage; the storage format (and hence the
       destructor) depends on how the matrix was supplied */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      /* the distributed solve path may have created SOLVEstruct */
      if ( lu->options.SolveInitialized ) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    /* release the communicator duplicated in the symbolic phase */
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  /* save the inherited destroy before freeing lu, which owns the pointer */
  destroy = lu->MatDestroy;
  ierr = PetscFree(lu);CHKERRQ(ierr);
  ierr = (*destroy)(A);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
88 
89 #undef __FUNCT__
90 #define __FUNCT__ "MatView_MPIAIJ_Spooles_DIST"
91 int MatView_MPIAIJ_SuperLU_DIST(Mat A,PetscViewer viewer)
92 {
93   int                     ierr;
94   PetscTruth              isascii;
95   PetscViewerFormat       format;
96   Mat_MPIAIJ_SuperLU_DIST *lu=(Mat_MPIAIJ_SuperLU_DIST*)(A->spptr);
97 
98   PetscFunctionBegin;
99   ierr = (*lu->MatView)(A,viewer);CHKERRQ(ierr);
100 
101   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
102   if (isascii) {
103     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
104     if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
105       ierr = MatMPIAIJFactorInfo_SuperLu(A,viewer);CHKERRQ(ierr);
106     }
107   }
108   PetscFunctionReturn(0);
109 }
110 
111 #undef __FUNCT__
112 #define __FUNCT__ "MatAssemblyEnd_MPIAIJ_SuperLU_DIST"
113 int MatAssemblyEnd_MPIAIJ_SuperLU_DIST(Mat A,MatAssemblyType mode) {
114   int                     ierr;
115   Mat_MPIAIJ_SuperLU_DIST *lu=(Mat_MPIAIJ_SuperLU_DIST*)(A->spptr);
116 
117   PetscFunctionBegin;
118   ierr = (*lu->MatAssemblyEnd)(A,mode);CHKERRQ(ierr);
119   ierr = MatUseSuperLU_DIST_MPIAIJ(A);CHKERRQ(ierr);
120   PetscFunctionReturn(0);
121 }
122 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_MPIAIJ_SuperLU_DIST"
/*
   Solves A x = b using the factors computed by
   MatLUFactorNumeric_MPIAIJ_SuperLU_DIST.

   SuperLU_DIST overwrites the right-hand side with the solution, so the
   RHS is first copied (or, in GLOBAL mode with >1 process, gathered into
   a sequential vector) and the solver works on that copy; in GLOBAL mode
   the sequential result is then scattered back into the parallel x.
*/
int MatSolve_MPIAIJ_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_MPIAIJ              *aa = (Mat_MPIAIJ*)A->data;
  Mat_MPIAIJ_SuperLU_DIST *lu = (Mat_MPIAIJ_SuperLU_DIST*)A->spptr;
  int                     ierr, size=aa->size;
  int                     m=A->M, N=A->N;
  SuperLUStat_t           stat;
  double                  berr[1];           /* backward error, one entry since nrhs==1 */
  PetscScalar             *bptr;             /* raw RHS/solution array handed to SuperLU */
  int                     info, nrhs=1;
  Vec                     x_seq;
  IS                      iden;
  VecScatter              scat;
  PetscLogDouble          time0,time,time_min,time_max;

  PetscFunctionBegin;
  if (size > 1) {
    if (lu->MatInputMode == GLOBAL) { /* global mat input, convert b to x_seq */
      ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
      ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
      ierr = ISDestroy(iden);CHKERRQ(ierr);

      ierr = VecScatterBegin(b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
    } else { /* distributed mat input: solve in place on a copy of b */
      ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
      ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
    }
  } else { /* size == 1 */
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only.*/

  PStatInit(&stat);        /* Initialize the statistics variables. */
  if (lu->StatPrint) {
    ierr = MPI_Barrier(A->comm);CHKERRQ(ierr); /* to be removed */
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);  /* to be removed */
  }
  if (lu->MatInputMode == GLOBAL) {
    /* replicated-matrix driver; bptr is both input RHS and output solution */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, m, nrhs,
                   &lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, m, nrhs,
                   &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
    /* NOTE(review): the ldb argument is passed as the global size A->M;
       SuperLU_DIST's p[dz]gssvx documents ldb as the LOCAL leading
       dimension -- confirm against the SuperLU_DIST users' guide */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, (doublecomplex*)bptr, A->M, nrhs, &lu->grid,
	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
    if (info) SETERRQ1(1,"pzgssvx fails, info: %d\n",info);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, bptr, A->M, nrhs, &lu->grid,
	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
    if (info) SETERRQ1(1,"pdgssvx fails, info: %d\n",info);
#endif
  }
  if (lu->StatPrint) {
    ierr = PetscGetTime(&time);CHKERRQ(ierr);  /* to be removed */
     PStatPrint(&lu->options, &stat, &lu->grid);     /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1) {
    if (lu->MatInputMode == GLOBAL){ /* convert seq x to mpi x */
      ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
      ierr = VecScatterBegin(x_seq,x,INSERT_VALUES,SCATTER_REVERSE,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x_seq,x,INSERT_VALUES,SCATTER_REVERSE,scat);CHKERRQ(ierr);
      ierr = VecScatterDestroy(scat);CHKERRQ(ierr);
      ierr = VecDestroy(x_seq);CHKERRQ(ierr);
    } else {
      ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
    }
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
  }
  if (lu->StatPrint) {
    /* elapsed solve time on this process; reduce for max/min/avg over ranks */
    time0 = time - time0;
    ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,A->comm);CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,A->comm);CHKERRQ(ierr);
    ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,A->comm);CHKERRQ(ierr);
    time = time/size; /* average time */
    ierr = PetscPrintf(A->comm, "  Time for superlu_dist solve (max/min/avg): %g / %g / %g\n\n",time_max,time_min,time);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
215 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_MPIAIJ_SuperLU_DIST"
/*
   Numeric LU factorization via SuperLU_DIST.

   GLOBAL input mode: gather A onto every process as one sequential
   matrix, convert PETSc's compressed-row storage to SuperLU's
   compressed-column (SLU_NC) format, and call p[dz]gssvx_ABglobal
   with a NULL right-hand side (factor only).

   DISTRIBUTED mode: merge the local diagonal block (mat->A) and
   off-diagonal block (mat->B) of the MPIAIJ matrix into a single
   local compressed-row matrix (SLU_NR_loc) with global column indices
   in ascending order per row, and call p[dz]gssvx.

   On repeat factorizations (lu->flg == SAME_NONZERO_PATTERN) the old LU
   structure is destroyed and options.Fact = SamePattern reuses the
   column permutation.
*/
int MatLUFactorNumeric_MPIAIJ_SuperLU_DIST(Mat A,Mat *F)
{
  Mat_MPIAIJ              *fac = (Mat_MPIAIJ*)(*F)->data,*mat;
  Mat                     *tseq,A_seq = PETSC_NULL;
  Mat_SeqAIJ              *aa,*bb;
  Mat_MPIAIJ_SuperLU_DIST *lu = (Mat_MPIAIJ_SuperLU_DIST*)(*F)->spptr;
  /* NOTE(review): irow is assigned below but never read afterwards */
  int                     M=A->M,N=A->N,info,ierr,size=fac->size,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                          m=A->m, irow,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  SuperLUStat_t           stat;
  double                  *berr=0;           /* no RHS during factorization */
  IS                      isrow;
  PetscLogDouble          time0[2],time[2],time_min[2],time_max[2]; /* [0]=conversion, [1]=factorization */
#if defined(PETSC_USE_COMPLEX)
  doublecomplex           *av, *bv;
#else
  double                  *av, *bv;
#endif

  PetscFunctionBegin;
  if (lu->StatPrint) {
    ierr = MPI_Barrier(A->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0[0]);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      /* gather all rows of A onto every process */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow); CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq); CHKERRQ(ierr);
      ierr = ISDestroy(isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr = PetscFree(tseq);CHKERRQ(ierr);
      aa =  (Mat_SeqAIJ*)A_seq->data;
    } else {
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Allocate storage, then convert Petsc NR matrix to SuperLU_DIST NC */
    if (lu->flg == DIFFERENT_NONZERO_PATTERN) {/* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(N, aa->nz, &lu->val, &lu->col, &lu->row);
#else
      dallocateA_dist(N, aa->nz, &lu->val, &lu->col, &lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern is reused. */
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
      Destroy_LU(N, &lu->grid, &lu->LUstruct);
      lu->options.Fact = SamePattern;
    }
#if defined(PETSC_USE_COMPLEX)
    zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col, &lu->row);
#else
    dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val, &lu->col, &lu->row);
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE);
#else
    dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE);
#endif
  } else { /* distributed mat input */
    mat =  (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;   /* local diagonal block */
    bb=(Mat_SeqAIJ*)(mat->B)->data;   /* local off-diagonal block */
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = mat->rstart;             /* first global row owned by this process */
    nz     = aa->nz + bb->nz;         /* total local nonzeros */
    garray = mat->garray;             /* local->global column map for the B block */
    rstart = mat->rstart;             /* NOTE(review): duplicate assignment, redundant */

    if (lu->flg == DIFFERENT_NONZERO_PATTERN) {/* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#else
      dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);  */ /* crash! */
      Destroy_LU(N, &lu->grid, &lu->LUstruct);
      lu->options.Fact = SamePattern;
    }
    /* Merge the A and B blocks row by row so that global column indices
       come out in ascending order: B entries left of the diagonal block,
       then the A entries, then the remaining B entries */
    nz = 0; jB = 0; irow = mat->rstart;
    for ( i=0; i<m; i++ ) {
      lu->row[i] = nz;
      countA = ai[i+1] - ai[i];
      countB = bi[i+1] - bi[i];
      ajj = aj + ai[i];  /* ptr to the beginning of this row */
      bjj = bj + bi[i];

      /* B part, smaller col index */
      colA_start = mat->rstart + ajj[0]; /* the smallest global col index of A */
      for (j=0; j<countB; j++){
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;              /* first B entry to the right of the A block */
          break;
        }
        lu->col[nz] = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;  /* all B entries consumed on the left */
      }

      /* A part */
      for (j=0; j<countA; j++){
        lu->col[nz] = mat->rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++){
        lu->col[nz] = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;  /* close the row-pointer array */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
				   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE);
#else
    dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,
				   lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE);
#endif
  }
  if (lu->StatPrint) {
    ierr = PetscGetTime(&time[0]);CHKERRQ(ierr);
    time0[0] = time[0] - time0[0];   /* conversion time on this process */
  }

  /* Factor the matrix. */
  PStatInit(&stat);   /* Initialize the statistics variables. */

  if (lu->StatPrint) {
    ierr = MPI_Barrier(A->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0[1]);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    /* NULL RHS and nrhs=0: factorization only, no solve */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                   &lu->grid, &lu->LUstruct, berr, &stat, &info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,
                   &lu->grid, &lu->LUstruct, berr, &stat, &info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid,
	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
    if (info) SETERRQ1(1,"pzgssvx fails, info: %d\n",info);
#else
    pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid,
	    &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info);
    if (info) SETERRQ1(1,"pdgssvx fails, info: %d\n",info);
#endif
  }
  if (lu->StatPrint) {
    ierr = PetscGetTime(&time[1]);CHKERRQ(ierr);  /* to be removed */
    time0[1] = time[1] - time0[1];
    if (lu->StatPrint) PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);

  /* the gathered sequential copy is no longer needed */
  if (lu->MatInputMode == GLOBAL && size > 1){
    ierr = MatDestroy(A_seq);CHKERRQ(ierr);
  }

  if (lu->StatPrint) {
    /* NOTE(review): these MPI_Reduce/PetscPrintf returns are stored in
       ierr but never checked with CHKERRQ */
    ierr = MPI_Reduce(time0,time_max,2,MPI_DOUBLE,MPI_MAX,0,A->comm);
    ierr = MPI_Reduce(time0,time_min,2,MPI_DOUBLE,MPI_MIN,0,A->comm);
    ierr = MPI_Reduce(time0,time,2,MPI_DOUBLE,MPI_SUM,0,A->comm);
    for (i=0; i<2; i++) time[i] = time[i]/size; /* average time */
    ierr = PetscPrintf(A->comm, "  Time for mat conversion (max/min/avg):    %g / %g / %g\n",time_max[0],time_min[0],time[0]);
    ierr = PetscPrintf(A->comm, "  Time for superlu_dist fact (max/min/avg): %g / %g / %g\n\n",time_max[1],time_min[1],time[1]);
  }
  (*F)->assembled = PETSC_TRUE;
  lu->flg         = SAME_NONZERO_PATTERN;  /* subsequent calls reuse the pattern */
  PetscFunctionReturn(0);
}
405 
406 /* Note the Petsc r and c permutations are ignored */
407 #undef __FUNCT__
408 #define __FUNCT__ "MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST"
409 int MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST(Mat A,IS r,IS c,MatFactorInfo *info,Mat *F)
410 {
411   Mat                     B;
412   Mat_MPIAIJ_SuperLU_DIST *lu;
413   int                     ierr,M=A->M,N=A->N,size;
414   superlu_options_t       options;
415   char                    buff[32];
416   PetscTruth              flg;
417   char                    *ptype[] = {"MMD_AT_PLUS_A","NATURAL","MMD_ATA","COLAMD"};
418   char                    *prtype[] = {"LargeDiag","NATURAL"};
419   PetscFunctionBegin;
420 
421   /* Create the factorization matrix */
422   ierr = MatCreate(A->comm,A->m,A->n,M,N,&B);CHKERRQ(ierr);
423   ierr = MatSetType(B,MATSUPERLUDIST);CHKERRQ(ierr);
424   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
425   ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
426 
427   B->ops->solve            = MatSolve_MPIAIJ_SuperLU_DIST;
428   B->factor                = FACTOR_LU;
429 
430   lu = (Mat_MPIAIJ_SuperLU_DIST*)(B->spptr);
431 
432   /* Set the input options */
433   set_default_options(&options);
434   lu->MatInputMode = GLOBAL;
435   ierr = MPI_Comm_dup(A->comm,&(lu->comm_superlu));CHKERRQ(ierr);
436 
437   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
438   lu->nprow = size/2;               /* Default process rows.      */
439   if (lu->nprow == 0) lu->nprow = 1;
440   lu->npcol = size/lu->nprow;           /* Default process columns.   */
441 
442   ierr = PetscOptionsBegin(A->comm,A->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);
443 
444     ierr = PetscOptionsInt("-mat_aij_superlu_dist_r","Number rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
445     ierr = PetscOptionsInt("-mat_aij_superlu_dist_c","Number columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
446     if (size != lu->nprow * lu->npcol) SETERRQ(1,"Number of processes should be equal to nprow*npcol");
447 
448     ierr = PetscOptionsInt("-mat_aij_superlu_dist_matinput","Matrix input mode (0: GLOBAL; 1: DISTRIBUTED)","None",lu->MatInputMode,&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
449     if(lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;
450 
451     ierr = PetscOptionsLogical("-mat_aij_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
452     if (!flg) {
453       options.Equil = NO;
454     }
455 
456     ierr = PetscOptionsEList("-mat_aij_superlu_dist_rowperm","Row permutation","None",prtype,2,prtype[0],buff,32,&flg);CHKERRQ(ierr);
457     while (flg) {
458       ierr = PetscStrcmp(buff,"LargeDiag",&flg);CHKERRQ(ierr);
459       if (flg) {
460         options.RowPerm = LargeDiag;
461         break;
462       }
463       ierr = PetscStrcmp(buff,"NATURAL",&flg);CHKERRQ(ierr);
464       if (flg) {
465         options.RowPerm = NOROWPERM;
466         break;
467       }
468       SETERRQ1(1,"Unknown row permutation %s",buff);
469     }
470 
471     ierr = PetscOptionsEList("-mat_aij_superlu_dist_colperm","Column permutation","None",ptype,4,ptype[0],buff,32,&flg);CHKERRQ(ierr);
472     while (flg) {
473       ierr = PetscStrcmp(buff,"MMD_AT_PLUS_A",&flg);CHKERRQ(ierr);
474       if (flg) {
475         options.ColPerm = MMD_AT_PLUS_A;
476         break;
477       }
478       ierr = PetscStrcmp(buff,"NATURAL",&flg);CHKERRQ(ierr);
479       if (flg) {
480         options.ColPerm = NATURAL;
481         break;
482       }
483       ierr = PetscStrcmp(buff,"MMD_ATA",&flg);CHKERRQ(ierr);
484       if (flg) {
485         options.ColPerm = MMD_ATA;
486         break;
487       }
488       ierr = PetscStrcmp(buff,"COLAMD",&flg);CHKERRQ(ierr);
489       if (flg) {
490         options.ColPerm = COLAMD;
491         break;
492       }
493       SETERRQ1(1,"Unknown column permutation %s",buff);
494     }
495 
496     ierr = PetscOptionsLogical("-mat_aij_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
497     if (!flg) {
498       options.ReplaceTinyPivot = NO;
499     }
500 
501     options.IterRefine = NOREFINE;
502     ierr = PetscOptionsLogical("-mat_aij_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
503     if (flg) {
504       options.IterRefine = DOUBLE;
505     }
506 
507     if (PetscLogPrintInfo) {
508       lu->StatPrint = (int)PETSC_TRUE;
509     } else {
510       lu->StatPrint = (int)PETSC_FALSE;
511     }
512     ierr = PetscOptionsLogical("-mat_aij_superlu_dist_statprint","Print factorization information","None",
513                               (PetscTruth)lu->StatPrint,(PetscTruth*)&lu->StatPrint,0);CHKERRQ(ierr);
514   PetscOptionsEnd();
515 
516   /* Initialize the SuperLU process grid. */
517   superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);
518 
519   /* Initialize ScalePermstruct and LUstruct. */
520   ScalePermstructInit(M, N, &lu->ScalePermstruct);
521   LUstructInit(M, N, &lu->LUstruct);
522 
523   lu->options            = options;
524   lu->flg                = DIFFERENT_NONZERO_PATTERN;
525   lu->CleanUpSuperLUDist = PETSC_TRUE;
526   *F = B;
527   PetscFunctionReturn(0);
528 }
529 
530 #undef __FUNCT__
531 #define __FUNCT__ "MatUseSuperLU_DIST_MPIAIJ"
532 int MatUseSuperLU_DIST_MPIAIJ(Mat A)
533 {
534   PetscFunctionBegin;
535   A->ops->lufactorsymbolic = MatLUFactorSymbolic_MPIAIJ_SuperLU_DIST;
536   A->ops->lufactornumeric  = MatLUFactorNumeric_MPIAIJ_SuperLU_DIST;
537   PetscFunctionReturn(0);
538 }
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "MatMPIAIJFactorInfo_SuperLu"
542 int MatMPIAIJFactorInfo_SuperLu(Mat A,PetscViewer viewer)
543 {
544   Mat_MPIAIJ_SuperLU_DIST *lu= (Mat_MPIAIJ_SuperLU_DIST*)A->spptr;
545   superlu_options_t       options;
546   int                     ierr;
547   char                    *colperm;
548 
549   PetscFunctionBegin;
550   /* check if matrix is superlu_dist type */
551   if (A->ops->solve != MatSolve_MPIAIJ_SuperLU_DIST) PetscFunctionReturn(0);
552 
553   options = lu->options;
554   ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
555   ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",(options.Equil != NO) ? "true": "false");CHKERRQ(ierr);
556   ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",(options.ReplaceTinyPivot != NO) ? "true": "false");CHKERRQ(ierr);
557   ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",(options.IterRefine == DOUBLE) ? "true": "false");CHKERRQ(ierr);
558   ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
559   ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL": "LargeDiag");CHKERRQ(ierr);
560   if (options.ColPerm == NATURAL) {
561     colperm = "NATURAL";
562   } else if (options.ColPerm == MMD_AT_PLUS_A) {
563     colperm = "MMD_AT_PLUS_A";
564   } else if (options.ColPerm == MMD_ATA) {
565     colperm = "MMD_ATA";
566   } else if (options.ColPerm == COLAMD) {
567     colperm = "COLAMD";
568   } else {
569     SETERRQ(1,"Unknown column permutation");
570   }
571   ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation %s \n",colperm);CHKERRQ(ierr);
572   PetscFunctionReturn(0);
573 }
574 
575 EXTERN_C_BEGIN
576 #undef __FUNCT__
577 #define __FUNCT__ "MatCreate_MPIAIJ_SuperLU_DIST"
578 int MatCreate_MPIAIJ_SuperLU_DIST(Mat A) {
579   int                     ierr,size;
580   MPI_Comm                comm;
581   Mat_MPIAIJ_SuperLU_DIST *lu;
582 
583   PetscFunctionBegin;
584   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
585   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
586   if (size == 1) {
587     ierr = MatSetType(A,MATSEQAIJ);CHKERRQ(ierr);
588   } else {
589     ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
590   }
591   ierr = MatUseSuperLU_DIST_MPIAIJ(A);CHKERRQ(ierr);
592 
593   ierr                   = PetscNew(Mat_MPIAIJ_SuperLU_DIST,&lu);CHKERRQ(ierr);
594   lu->MatView            = A->ops->view;
595   lu->MatAssemblyEnd     = A->ops->assemblyend;
596   lu->MatDestroy         = A->ops->destroy;
597   lu->CleanUpSuperLUDist = PETSC_FALSE;
598   A->spptr               = (void*)lu;
599   A->ops->view           = MatView_MPIAIJ_SuperLU_DIST;
600   A->ops->assemblyend    = MatAssemblyEnd_MPIAIJ_SuperLU_DIST;
601   A->ops->destroy        = MatDestroy_MPIAIJ_SuperLU_DIST;
602   PetscFunctionReturn(0);
603 }
604 EXTERN_C_END
605 
606 EXTERN_C_BEGIN
607 #undef __FUNCT__
608 #define __FUNCT__ "MatLoad_MPIAIJ_SuperLU_DIST"
609 int MatLoad_MPIAIJ_SuperLU_DIST(PetscViewer viewer,MatType type,Mat *A) {
610   int      ierr,size,(*r)(PetscViewer,MatType,Mat*);
611   MPI_Comm comm;
612 
613   PetscFunctionBegin;
614   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
615   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
616   if (size == 1) {
617     ierr = PetscFListFind(comm,MatLoadList,MATSEQAIJ,(void(**)(void))&r);CHKERRQ(ierr);
618   } else {
619     ierr = PetscFListFind(comm,MatLoadList,MATMPIAIJ,(void(**)(void))&r);CHKERRQ(ierr);
620   }
621   ierr = (*r)(viewer,type,A);CHKERRQ(ierr);
622   PetscFunctionReturn(0);
623 }
624 EXTERN_C_END
625