xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision 668f157ea6d169bb50bcdb9ebcdd418abd089fa7)
1 #define PETSCMAT_DLL
2 
3 /*
4     Provides an interface to the MUMPS sparse solver
5 */
6 #include "../src/mat/impls/aij/seq/aij.h"  /*I  "petscmat.h"  I*/
7 #include "../src/mat/impls/aij/mpi/mpiaij.h"
8 #include "../src/mat/impls/sbaij/seq/sbaij.h"
9 #include "../src/mat/impls/sbaij/mpi/mpisbaij.h"
10 #include "../src/mat/impls/baij/seq/baij.h"
11 #include "../src/mat/impls/baij/mpi/mpibaij.h"
12 
13 EXTERN_C_BEGIN
14 #if defined(PETSC_USE_COMPLEX)
15 #include "zmumps_c.h"
16 #else
17 #include "dmumps_c.h"
18 #endif
19 EXTERN_C_END
20 #define JOB_INIT -1
21 #define JOB_END -2
22 /* macros s.t. indices match MUMPS documentation */
23 #define ICNTL(I) icntl[(I)-1]
24 #define CNTL(I) cntl[(I)-1]
25 #define INFOG(I) infog[(I)-1]
26 #define INFO(I) info[(I)-1]
27 #define RINFOG(I) rinfog[(I)-1]
28 #define RINFO(I) rinfo[(I)-1]
29 
/* Per-factor context stored in Mat->spptr by the MUMPS interface. */
typedef struct {
#if defined(PETSC_USE_COMPLEX)
  ZMUMPS_STRUC_C id;                  /* the MUMPS instance (complex build) */
#else
  DMUMPS_STRUC_C id;                  /* the MUMPS instance (real build) */
#endif
  MatStructure   matstruc;            /* DIFFERENT_NONZERO_PATTERN forces the analysis phase to rerun */
  PetscMPIInt    myid,size;           /* rank and size of the matrix communicator */
  PetscInt       *irn,*jcn,sym,nSolve;/* triple row/col indices (1-based Fortran), symmetry flag, MatSolve counter */
  PetscScalar    *val;                /* triple values matching irn/jcn */
  MPI_Comm       comm_mumps;          /* duplicated communicator handed to MUMPS (freed in MatDestroy_MUMPS) */
  VecScatter     scat_rhs, scat_sol;  /* scatters: PETSc rhs -> host b_seq; distributed x_seq -> PETSc x */
  PetscTruth     isAIJ,CleanUpMUMPS,mumpsview;
  Vec            b_seq,x_seq;         /* centralized rhs on host; local piece of the distributed solution */
  PetscErrorCode (*MatDestroy)(Mat);  /* saved destroy routine of the underlying matrix type, chained at the end of MatDestroy_MUMPS */
} Mat_MUMPS;
46 
47 EXTERN PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
48 
49 /* convert Petsc mpiaij matrix to triples: row[nz], col[nz], val[nz] */
50 /*
51   input:
52     A       - matrix in mpiaij or mpisbaij (bs=1) format
53     shift   - 0: C style output triple; 1: Fortran style output triple.
54     valOnly - FALSE: spaces are allocated and values are set for the triple
55               TRUE:  only the values in v array are updated
56   output:
57     nnz     - dim of r, c, and v (number of local nonzero entries of A)
58     r, c, v - row and col index, matrix values (matrix triples)
59  */
PetscErrorCode MatConvertToTriples(Mat A,int shift,PetscTruth valOnly,int *nnz,int **r, int **c, PetscScalar **v)
{
  PetscInt       *ai, *aj, *bi, *bj, rstart,nz, *garray;
  PetscErrorCode ierr;
  PetscInt       i,j,jj,jB,irow,m=A->rmap->n,*ajj,*bjj,countA,countB,colA_start,jcol;
  PetscInt       *row,*col;
  PetscScalar    *av, *bv,*val;
  PetscTruth     isAIJ;

  PetscFunctionBegin;
  /* Grab the CSR arrays of the diagonal block (mat->A) and off-diagonal block (mat->B);
     garray maps B's local column indices to global column indices. */
  ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isAIJ);CHKERRQ(ierr);
  if (isAIJ){
    Mat_MPIAIJ    *mat =  (Mat_MPIAIJ*)A->data;
    Mat_SeqAIJ    *aa=(Mat_SeqAIJ*)(mat->A)->data;
    Mat_SeqAIJ    *bb=(Mat_SeqAIJ*)(mat->B)->data;
    nz = aa->nz + bb->nz;
    ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
    garray = mat->garray;
    av=aa->a; bv=bb->a;

  } else {
    Mat_MPISBAIJ  *mat =  (Mat_MPISBAIJ*)A->data;
    Mat_SeqSBAIJ  *aa=(Mat_SeqSBAIJ*)(mat->A)->data;
    Mat_SeqBAIJ    *bb=(Mat_SeqBAIJ*)(mat->B)->data;
    if (A->rmap->bs > 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP," bs=%d is not supported yet\n", A->rmap->bs);
    nz = aa->nz + bb->nz;
    ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
    garray = mat->garray;
    av=aa->a; bv=bb->a;
  }

  if (!valOnly){
    /* First call: allocate the triple arrays and hand ownership to the caller. */
    ierr = PetscMalloc(nz*sizeof(PetscInt) ,&row);CHKERRQ(ierr);
    ierr = PetscMalloc(nz*sizeof(PetscInt),&col);CHKERRQ(ierr);
    ierr = PetscMalloc(nz*sizeof(PetscScalar),&val);CHKERRQ(ierr);
    *r = row; *c = col; *v = val;
  } else {
    /* Repeated call with the same nonzero pattern: only refresh the values. */
    row = *r; col = *c; val = *v;
  }
  *nnz = nz;

  /* Walk the local rows, emitting entries in ascending global column order:
     B-entries left of the diagonal block, then A-entries, then the remaining B-entries. */
  jj = 0; irow = rstart;
  for ( i=0; i<m; i++ ) {
    ajj = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj = bj + bi[i];

    /* get jB, the starting local col index for the 2nd B-part */
    /* NOTE(review): ajj[0] is read even when countA==0, i.e. this assumes every
       local row has at least one entry in the diagonal block -- verify for empty rows. */
    colA_start = rstart + ajj[0]; /* the smallest col index for A */
    j=-1;
    do {
      j++;
      if (j == countB) break;
      jcol = garray[bjj[j]];
    } while (jcol < colA_start);
    jB = j;                           /* first B-entry whose global column is >= colA_start */

    /* B-part, smaller col index */
    colA_start = rstart + ajj[0]; /* the smallest col index for A */
    for (j=0; j<jB; j++){
      jcol = garray[bjj[j]];
      if (!valOnly){
        row[jj] = irow + shift; col[jj] = jcol + shift;

      }
      val[jj++] = *bv++;
    }
    /* A-part */
    for (j=0; j<countA; j++){
      if (!valOnly){
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = *av++;
    }
    /* B-part, larger col index */
    for (j=jB; j<countB; j++){
      if (!valOnly){
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = *bv++;
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
146 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroy the MUMPS factor A: free the triples, scatters and work vectors,
  terminate the MUMPS instance (JOB_END), then chain to the destroy routine
  of the underlying matrix type saved in lu->MatDestroy.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;
  PetscMPIInt    size=lu->size;

  PetscFunctionBegin;
  if (lu->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    if (size > 1){
      /* Parallel case: distributed-solution buffers and the rhs scatter exist. */
      ierr = PetscFree2(lu->id.sol_loc,lu->id.isol_loc);CHKERRQ(ierr);
      ierr = VecScatterDestroy(lu->scat_rhs);CHKERRQ(ierr);
      ierr = VecDestroy(lu->b_seq);CHKERRQ(ierr);
      /* scat_sol and x_seq are only created on the first MatSolve() */
      if (lu->nSolve && lu->scat_sol){ierr = VecScatterDestroy(lu->scat_sol);CHKERRQ(ierr);}
      if (lu->nSolve && lu->x_seq){ierr = VecDestroy(lu->x_seq);CHKERRQ(ierr);}
      ierr = PetscFree(lu->val);CHKERRQ(ierr);
    }
    /* Sequential AIJ-Cholesky allocated its own copy of the upper-triangular values */
    if( size == 1 && (A)->factortype == MAT_FACTOR_CHOLESKY && lu->isAIJ) {
      ierr = PetscFree(lu->val);CHKERRQ(ierr);
    }
    lu->id.job=JOB_END;
#if defined(PETSC_USE_COMPLEX)
    zmumps_c(&lu->id);
#else
    dmumps_c(&lu->id);
#endif
    ierr = PetscFree(lu->irn);CHKERRQ(ierr);
    ierr = PetscFree(lu->jcn);CHKERRQ(ierr);
    ierr = MPI_Comm_free(&(lu->comm_mumps));CHKERRQ(ierr);
  }
  /* clear composed functions */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatFactorGetSolverPackage_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatMumpsSetIcntl_C","",PETSC_NULL);CHKERRQ(ierr);
  /* chain to the destroy of the native matrix type */
  ierr = (lu->MatDestroy)(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
185 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_MUMPS"
/*
  Solve A x = b using the MUMPS factor in A->spptr (MUMPS job=3).
  MUMPS requires the right-hand side centralized on the host process;
  in parallel the solution comes back distributed (ICNTL(21)=1) and is
  scattered into the PETSc vector x.  Sequentially, b is copied into x
  and MUMPS overwrites x in place.
*/
PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
  PetscScalar    *array;
  Vec            x_seq;
  IS             is_iden,is_petsc;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  lu->id.nrhs = 1;  /* single rhs (NOTE(review): set again on the host below; harmless redundancy) */
  x_seq = lu->b_seq;
  if (lu->size > 1){
    /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
    ierr = VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (!lu->myid) {ierr = VecGetArray(x_seq,&array);CHKERRQ(ierr);}
  } else {  /* size == 1 */
    ierr = VecCopy(b,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
  }
  if (!lu->myid) { /* define rhs on the host */
    lu->id.nrhs = 1;
#if defined(PETSC_USE_COMPLEX)
    lu->id.rhs = (mumps_double_complex*)array;
#else
    lu->id.rhs = array;
#endif
  }
  if (lu->size == 1){
    ierr = VecRestoreArray(x,&array);CHKERRQ(ierr);
  } else if (!lu->myid){
    ierr = VecRestoreArray(x_seq,&array);CHKERRQ(ierr);
  }

  if (lu->size > 1){
    /* distributed solution */
    lu->id.ICNTL(21) = 1;
    if (!lu->nSolve){
      /* Create x_seq=sol_loc for repeated use */
      PetscInt    lsol_loc;
      PetscScalar *sol_loc;
      lsol_loc = lu->id.INFO(23); /* length of sol_loc */
      ierr = PetscMalloc2(lsol_loc,PetscScalar,&sol_loc,lsol_loc,PetscInt,&lu->id.isol_loc);CHKERRQ(ierr);
      lu->id.lsol_loc = lsol_loc;
#if defined(PETSC_USE_COMPLEX)
      lu->id.sol_loc  = (mumps_double_complex*)sol_loc;
#else
      lu->id.sol_loc  = sol_loc;
#endif
      /* wrap the local solution buffer in a Vec so it can be scattered below */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,lsol_loc,sol_loc,&lu->x_seq);CHKERRQ(ierr);
    }
  }

  /* solve phase */
  /*-------------*/
  lu->id.job = 3;
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",lu->id.INFOG(1));

  if (lu->size > 1) { /* convert mumps distributed solution to petsc mpi x */
    if (!lu->nSolve){ /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,lu->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<lu->id.lsol_loc; i++){
        lu->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,lu->id.lsol_loc,lu->id.isol_loc,&is_petsc);CHKERRQ(ierr);  /* to */
      ierr = VecScatterCreate(lu->x_seq,is_iden,x,is_petsc,&lu->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(is_petsc);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  lu->nSolve++;  /* subsequent solves reuse scat_sol/x_seq */
  PetscFunctionReturn(0);
}
269 
270 #if !defined(PETSC_USE_COMPLEX)
271 /*
272   input:
273    F:        numeric factor
274   output:
275    nneg:     total number of negative pivots
276    nzero:    0
277    npos:     (global dimension of F) - nneg
278 */
279 
280 #undef __FUNCT__
281 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
282 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
283 {
284   Mat_MUMPS      *lu =(Mat_MUMPS*)F->spptr;
285   PetscErrorCode ierr;
286   PetscMPIInt    size;
287 
288   PetscFunctionBegin;
289   ierr = MPI_Comm_size(((PetscObject)F)->comm,&size);CHKERRQ(ierr);
290   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
291   if (size > 1 && lu->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",lu->id.INFOG(13));
292   if (nneg){
293     if (!lu->myid){
294       *nneg = lu->id.INFOG(12);
295     }
296     ierr = MPI_Bcast(nneg,1,MPI_INT,0,lu->comm_mumps);CHKERRQ(ierr);
297   }
298   if (nzero) *nzero = 0;
299   if (npos)  *npos  = F->rmap->N - (*nneg);
300   PetscFunctionReturn(0);
301 }
302 #endif /* !defined(PETSC_USE_COMPLEX) */
303 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Numeric factorization of A with MUMPS (job=2), preceded by the analysis
  phase (job=1) on the first call or whenever the nonzero pattern changed.
  Shared by the LU and Cholesky paths: the triples lu->irn/jcn/val were set
  up by the corresponding symbolic routine; here only the values are
  refreshed (valOnly=PETSC_TRUE in the distributed case).
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *lu =(Mat_MUMPS*)(F)->spptr;
  Mat            newMat;
  PetscErrorCode ierr;
  PetscInt       rnz,nnz,nz=0,i,M=A->rmap->N,*ai,*aj,*adiag,jidx;
  PetscScalar   *av;
  PetscTruth     valOnly;
  Mat            F_diag;
  IS             is_iden;
  Vec            b;
  PetscTruth     isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);

  /* define matrix A */
  switch (lu->id.ICNTL(18)){
  case 0:  /* centralized assembled matrix input (size=1) */
    if (!lu->myid) {
      if (isSeqAIJ){
	Mat_SeqAIJ   *aa = (Mat_SeqAIJ*)A->data;
	nz               = aa->nz;
	ai = aa->i; aj = aa->j; adiag = aa->diag; av = aa->a;
      } else if (isSeqSBAIJ) {
        Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)A->data;
        nz                  =  aa->nz;
        ai = aa->i; aj = aa->j; av  = aa->a;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No mumps factorization for this matrix type");

      if ((F)->factortype == MAT_FACTOR_CHOLESKY && isSeqAIJ) {
	/* AIJ-Cholesky: copy the upper-triangular values (from the diagonal on)
	   into the buffer lu->val allocated by the symbolic routine */
	nz = 0;
	for (i=0; i<M; i++){
	  rnz = ai[i+1] - adiag[i];
	  jidx = adiag[i];
	  while (rnz--) {  /* Fortran row/col index! */
	    lu->val[nz] = av[jidx]; jidx++; nz++;
	  }
	}
      } else {
	/* otherwise point MUMPS directly at the matrix's value array */
	lu->val = av;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    valOnly = PETSC_TRUE; /* only update mat values, not row and col index */

    if(((F)->factortype == MAT_FACTOR_CHOLESKY) && isMPIAIJ) {
      /* Create an SBAIJ matrix and use this matrix to set the lu values */
      ierr = MatConvert(A,MATMPISBAIJ,MAT_INITIAL_MATRIX,&newMat);CHKERRQ(ierr);
      ierr = MatConvertToTriples(newMat,1,valOnly,&nnz,&lu->irn , &lu->jcn, &lu->val);CHKERRQ(ierr);
      ierr = MatDestroy(newMat);CHKERRQ(ierr);
    }
    else {
      ierr = MatConvertToTriples(A,1,valOnly, &nnz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix input format is not supported by MUMPS.");
  }

  /* analysis phase */
  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
    lu->id.job = 1;

    lu->id.n = M;
    switch (lu->id.ICNTL(18)){
    case 0:  /* centralized assembled matrix input */
      if (!lu->myid) {
        lu->id.nz =nz; lu->id.irn=lu->irn; lu->id.jcn=lu->jcn;
        /* values are only needed by analysis when ICNTL(6)>1 (scaling/permutation uses them) */
        if (lu->id.ICNTL(6)>1){
#if defined(PETSC_USE_COMPLEX)
          lu->id.a = (mumps_double_complex*)lu->val;
#else
          lu->id.a = lu->val;
#endif
        }
      }
      break;
    case 3:  /* distributed assembled matrix input (size>1) */
      lu->id.nz_loc = nnz;
      lu->id.irn_loc=lu->irn; lu->id.jcn_loc=lu->jcn;
      if (lu->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
        lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
        lu->id.a_loc = lu->val;
#endif
      }
      /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      if (!lu->myid){
        /* the host holds the entire rhs; all other ranks contribute through the scatter */
        ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
      } else {
        ierr = VecCreateSeq(PETSC_COMM_SELF,0,&lu->b_seq);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
      }
      /* template vector with the parallel layout of the rhs, only used to build the scatter */
      ierr = VecCreate(((PetscObject)A)->comm,&b);CHKERRQ(ierr);
      ierr = VecSetSizes(b,A->rmap->n,PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetFromOptions(b);CHKERRQ(ierr);

      ierr = VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);CHKERRQ(ierr);
      ierr = ISDestroy(is_iden);CHKERRQ(ierr);
      ierr = VecDestroy(b);CHKERRQ(ierr);
      break;
    }
#if defined(PETSC_USE_COMPLEX)
    zmumps_c(&lu->id);
#else
    dmumps_c(&lu->id);
#endif
    if (lu->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",lu->id.INFOG(1));
  }

  /* numerical factorization phase */
  /*-------------------------------*/
  lu->id.job = 2;
  if(!lu->id.ICNTL(18)) {
    if (!lu->myid) {
#if defined(PETSC_USE_COMPLEX)
      lu->id.a = (mumps_double_complex*)lu->val;
#else
      lu->id.a = lu->val;
#endif
    }
  } else {
#if defined(PETSC_USE_COMPLEX)
    lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
    lu->id.a_loc = lu->val;
#endif
  }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) {
    /* INFO(1)=-13: allocation failure inside MUMPS; INFO(2) then holds the size in MB */
    if (lu->id.INFO(1) == -13) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",lu->id.INFO(2));
    else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",lu->id.INFO(1),lu->id.INFO(2));
  }
  /* NOTE(review): the message refers to ICNTL(16) but prints INFOG(16) -- confirm which value is intended */
  if (!lu->myid && lu->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  lu->id.ICNTL(16):=%d\n",lu->id.INFOG(16));

  if (lu->size > 1){
    /* mark the diagonal block of the parallel factor assembled so PETSc treats F as usable */
    if(isMPIAIJ) {
      F_diag = ((Mat_MPIAIJ *)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ *)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
    if (lu->nSolve){
      /* refactoring invalidates the solution scatter built by previous solves */
      ierr = VecScatterDestroy(lu->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(lu->id.sol_loc,lu->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(lu->x_seq);CHKERRQ(ierr);
    }
  }
  (F)->assembled   = PETSC_TRUE;
  lu->matstruc     = SAME_NONZERO_PATTERN;  /* skip analysis on the next numeric factorization */
  lu->CleanUpMUMPS = PETSC_TRUE;
  lu->nSolve       = 0;
  PetscFunctionReturn(0);
}
470 
#undef __FUNCT__
#define __FUNCT__ "PetscSetMUMPSOptions"
/*
  Read the -mat_mumps_* runtime options and store them into the MUMPS
  control arrays (ICNTL/CNTL) of the factor F.  Also chooses centralized
  (ICNTL(18)=0) vs distributed (ICNTL(18)=3) matrix input based on the
  communicator size, and silences MUMPS output unless printing was
  explicitly requested.
*/
PetscErrorCode PetscSetMUMPSOptions(Mat F, Mat A)
{
  Mat_MUMPS        *lu = (Mat_MUMPS*)F->spptr;
  PetscErrorCode   ierr;
  PetscInt         icntl;
  PetscTruth       flg;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsTruth("-mat_mumps_view","View MUMPS parameters","None",lu->mumpsview,&lu->mumpsview,PETSC_NULL);CHKERRQ(ierr);
  if (lu->size == 1){
    lu->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
  } else {
    lu->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
  }

  icntl=-1;
  lu->id.ICNTL(4) = 0;  /* level of printing; overwrite mumps default ICNTL(4)=2 */
  ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",lu->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
  if ((flg && icntl > 0) || PetscLogPrintInfo) {
    lu->id.ICNTL(4)=icntl; /* and use mumps default icntl(i), i=1,2,3 */
  } else { /* no output */
    lu->id.ICNTL(1) = 0;  /* error message, default= 6 */
    lu->id.ICNTL(2) = 0;  /* output stream for diagnostic printing, statistics, and warning. default=0 */
    lu->id.ICNTL(3) = 0; /* output stream for global information, default=6 */
  }
  ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): column permutation and/or scaling to get a zero-free diagonal (0 to 7)","None",lu->id.ICNTL(6),&lu->id.ICNTL(6),PETSC_NULL);CHKERRQ(ierr);
  icntl=-1;
  ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7)","None",lu->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
  if (flg) {
    /* ICNTL(7)=1 would require a user-supplied permutation array (PERM_IN) */
    if (icntl== 1){
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
    } else {
      lu->id.ICNTL(7) = icntl;
    }
  }
  ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 7 or 77)","None",lu->id.ICNTL(8),&lu->id.ICNTL(8),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): A or A^T x=b to be solved. 1: A; otherwise: A^T","None",lu->id.ICNTL(9),&lu->id.ICNTL(9),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",lu->id.ICNTL(10),&lu->id.ICNTL(10),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to the linear system solved (via -ksp_view)","None",lu->id.ICNTL(11),&lu->id.ICNTL(11),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control: defines the ordering strategy with scaling constraints (0 to 3","None",lu->id.ICNTL(12),&lu->id.ICNTL(12),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control: with or without ScaLAPACK","None",lu->id.ICNTL(13),&lu->id.ICNTL(13),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",lu->id.ICNTL(14),&lu->id.ICNTL(14),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): Schur complement","None",lu->id.ICNTL(19),&lu->id.ICNTL(19),PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core facility (0 or 1)","None",lu->id.ICNTL(22),&lu->id.ICNTL(22),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",lu->id.ICNTL(23),&lu->id.ICNTL(23),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",lu->id.ICNTL(24),&lu->id.ICNTL(24),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computation of a null space basis","None",lu->id.ICNTL(25),&lu->id.ICNTL(25),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): Schur options for right-hand side or solution vector","None",lu->id.ICNTL(26),&lu->id.ICNTL(26),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): experimental parameter","None",lu->id.ICNTL(27),&lu->id.ICNTL(27),PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",lu->id.CNTL(1),&lu->id.CNTL(1),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",lu->id.CNTL(2),&lu->id.CNTL(2),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",lu->id.CNTL(3),&lu->id.CNTL(3),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",lu->id.CNTL(4),&lu->id.CNTL(4),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",lu->id.CNTL(5),&lu->id.CNTL(5),PETSC_NULL);CHKERRQ(ierr);
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
533 
#undef __FUNCT__
#define __FUNCT__ "PetscInitializeMUMPS"
/*
  Start a MUMPS instance for the factor F (JOB_INIT).  The host process
  participates in factorization and solve (par=1); the symmetry mode is
  taken from lu->sym, which the symbolic routine set beforehand (0 for LU,
  2 for symmetric/Cholesky; -mat_mumps_sym 1 upgrades to SPD).
*/
PetscErrorCode PetscInitializeMUMPS(Mat F)
{
  Mat_MUMPS       *lu = (Mat_MUMPS*)F->spptr;
  PetscErrorCode  ierr;
  PetscInt        icntl;
  PetscTruth      flg;

  PetscFunctionBegin;
  lu->id.job = JOB_INIT;
  lu->id.par=1;  /* host participates factorizaton and solve */
  lu->id.sym=lu->sym;
  if (lu->sym == 2){
    /* NOTE(review): PetscOptionsInt is called here outside a PetscOptionsBegin/End
       pair -- confirm this is valid usage for this PETSc version. */
    ierr = PetscOptionsInt("-mat_mumps_sym","SYM: (1,2)","None",lu->id.sym,&icntl,&flg);CHKERRQ(ierr);
    if (flg && icntl == 1) lu->id.sym=icntl;  /* matrix is spd */
  }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  PetscFunctionReturn(0);
}
558 
559 /* Note the Petsc r and c permutations are ignored */
560 #undef __FUNCT__
561 #define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
562 PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
563 {
564   Mat_MUMPS          *lu = (Mat_MUMPS*)F->spptr;
565   PetscErrorCode     ierr;
566   PetscTruth         isSeqAIJ,isMPIAIJ;
567   PetscInt           nz=0,M=A->rmap->N,rnz,i,nnz;
568   const PetscInt     *ai,*aj;
569   PetscTruth         valOnly;
570 
571   PetscFunctionBegin;
572   lu->sym                  = 0;
573   lu->matstruc             = DIFFERENT_NONZERO_PATTERN;
574 
575   ierr = MPI_Comm_rank(((PetscObject)A)->comm, &lu->myid);
576   ierr = MPI_Comm_size(((PetscObject)A)->comm,&lu->size);CHKERRQ(ierr);
577   ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_mumps));CHKERRQ(ierr);
578   lu->id.comm_fortran = MPI_Comm_c2f(lu->comm_mumps);
579 
580   /* Initialize a MUMPS instance */
581   ierr = PetscInitializeMUMPS(F);CHKERRQ(ierr);
582   /* Set MUMPS options */
583   ierr = PetscSetMUMPSOptions(F,A);CHKERRQ(ierr);
584 
585   ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
586   ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
587   switch (lu->id.ICNTL(18)){
588   case 0:  /* centralized assembled matrix input (size=1) */
589     if (!lu->myid) {
590       Mat_SeqAIJ   *aa = (Mat_SeqAIJ*)A->data;
591       nz               = aa->nz;
592       ai = aa->i; aj = aa->j; lu->val = aa->a;
593       ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->irn);CHKERRQ(ierr);
594       ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->jcn);CHKERRQ(ierr);
595       nz = 0;
596       for (i=0; i<M; i++){
597 	rnz = ai[i+1] - ai[i];
598 	while (rnz--) {  /* Fortran row/col index! */
599 	  lu->irn[nz] = i+1; lu->jcn[nz] = (*aj)+1; aj++; nz++;
600 	}
601       }
602     }
603     break;
604   case 3:  /* distributed assembled matrix input (size>1) */
605     valOnly = PETSC_FALSE;
606     ierr = MatConvertToTriples(A,1,valOnly, &nnz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);
607     break;
608   default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix input format is not supported by MUMPS.");
609   }
610 
611   F->ops->lufactornumeric  = MatFactorNumeric_MUMPS;
612   F->ops->solve            = MatSolve_MUMPS;
613   PetscFunctionReturn(0);
614 }
615 
616 /* Note the Petsc r and c permutations are ignored */
617 #undef __FUNCT__
618 #define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
619 PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
620 {
621 
622   Mat_MUMPS       *lu = (Mat_MUMPS*)F->spptr;
623   PetscErrorCode  ierr;
624 
625   PetscFunctionBegin;
626   lu->sym                  = 0;
627   lu->matstruc             = DIFFERENT_NONZERO_PATTERN;
628   ierr = MPI_Comm_rank(((PetscObject)A)->comm, &lu->myid);
629   ierr = MPI_Comm_size(((PetscObject)A)->comm,&lu->size);CHKERRQ(ierr);
630   ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_mumps));CHKERRQ(ierr);
631   lu->id.comm_fortran = MPI_Comm_c2f(lu->comm_mumps);
632 
633   /* Initialize a MUMPS instance */
634   ierr = PetscInitializeMUMPS(F);CHKERRQ(ierr);
635   /* Set MUMPS options */
636   ierr = PetscSetMUMPSOptions(F,A);CHKERRQ(ierr);
637 
638   F->ops->lufactornumeric  = MatFactorNumeric_MUMPS;
639   F->ops->solve            = MatSolve_MUMPS;
640   PetscFunctionReturn(0);
641 }
642 
643 /* Note the Petsc r permutation is ignored */
644 #undef __FUNCT__
645 #define __FUNCT__ "MatCholeskyFactorSymbolic_SBAIJMUMPS"
646 PetscErrorCode MatCholeskyFactorSymbolic_SBAIJMUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
647 {
648   Mat_MUMPS          *lu = (Mat_MUMPS*)F->spptr;
649   Mat                newMat;
650   PetscErrorCode     ierr;
651   PetscInt           nz=0,M=A->rmap->N,rnz,i,nnz,jidx;
652   const PetscInt     *ai,*aj,*adiag;
653   PetscScalar        *av;
654   PetscTruth         valOnly,isSeqAIJ,isMPIAIJ;
655 
656 
657   PetscFunctionBegin;
658   lu->sym                          = 2;
659   lu->matstruc                     = DIFFERENT_NONZERO_PATTERN;
660   ierr = MPI_Comm_rank(((PetscObject)A)->comm, &lu->myid);
661   ierr = MPI_Comm_size(((PetscObject)A)->comm,&lu->size);CHKERRQ(ierr);
662   ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_mumps));CHKERRQ(ierr);
663   lu->id.comm_fortran = MPI_Comm_c2f(lu->comm_mumps);
664 
665   /* Initialize a MUMPS instance */
666   ierr = PetscInitializeMUMPS(F);CHKERRQ(ierr);
667   /* Set MUMPS options */
668   ierr = PetscSetMUMPSOptions(F,A);CHKERRQ(ierr);
669 
670   ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
671   ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
672   switch (lu->id.ICNTL(18)){
673   case 0:  /* centralized assembled matrix input (size=1) */
674     if (!lu->myid) {
675       if(isSeqAIJ) {
676 	Mat_SeqAIJ   *aa = (Mat_SeqAIJ*)A->data;
677 	nz               = aa->nz;
678 	ai = aa->i; aj = aa->j; adiag = aa->diag; av = aa->a;
679       } else {
680 	Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)A->data;
681 	nz               = aa->nz;
682 	ai = aa->i; aj = aa->j; adiag = aa->diag; av = aa->a;
683       }
684       if ((F)->factortype == MAT_FACTOR_CHOLESKY && isSeqAIJ) {
685 	nz = M + (nz - M)/2;  /* nz for upper triangle */
686 	ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->irn);CHKERRQ(ierr);
687 	ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->jcn);CHKERRQ(ierr);
688 	ierr = PetscMalloc(nz*sizeof(PetscScalar),&lu->val);CHKERRQ(ierr);
689 	nz = 0;
690 	for (i=0; i<M; i++){
691 	  rnz = ai[i+1] - adiag[i];
692 	  jidx = adiag[i];
693 	  while (rnz--) {  /* Fortran row/col index! */
694 	    lu->irn[nz] = i+1; lu->jcn[nz] = aj[jidx]+1;
695 	    lu->val[nz] = av[jidx]; jidx++; nz++;
696 	  }
697 	}
698       } else {
699 	lu->val = av;
700 	ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->irn);CHKERRQ(ierr);
701 	ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->jcn);CHKERRQ(ierr);
702 	nz = 0;
703 	for (i=0; i<M; i++){
704 	  rnz = ai[i+1] - ai[i];
705 	  while (rnz--) {  /* Fortran row/col index! */
706 	    lu->irn[nz] = i+1; lu->jcn[nz] = (*aj)+1; aj++; nz++;
707 	  }
708 	}
709       }
710     }
711     break;
712   case 3:  /* distributed assembled matrix input (size>1) */
713     valOnly = PETSC_FALSE;
714     if(((F)->factortype == MAT_FACTOR_CHOLESKY) && isMPIAIJ) {
715       /* Create an SBAIJ matrix and use this matrix to set the lu values */
716       ierr = MatConvert(A,MATMPISBAIJ,MAT_INITIAL_MATRIX,&newMat);CHKERRQ(ierr);
717       ierr = MatConvertToTriples(newMat,1,valOnly,&nnz,&lu->irn , &lu->jcn, &lu->val);CHKERRQ(ierr);
718       ierr = MatDestroy(newMat);CHKERRQ(ierr);
719     } else {
720       ierr = MatConvertToTriples(A,1,valOnly, &nnz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);
721     }
722     break;
723   default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix input format is not supported by MUMPS.");
724   }
725 
726   F->ops->choleskyfactornumeric =  MatFactorNumeric_MUMPS;
727   F->ops->solve                 =  MatSolve_MUMPS;
728 #if !defined(PETSC_USE_COMPLEX)
729   (F)->ops->getinertia          =  MatGetInertia_SBAIJMUMPS;
730 #endif
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "MatFactorInfo_MUMPS"
736 PetscErrorCode MatFactorInfo_MUMPS(Mat A,PetscViewer viewer)
737 {
738   Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
739   PetscErrorCode ierr;
740 
741   PetscFunctionBegin;
742   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):        %d \n",lu->id.ICNTL(1));CHKERRQ(ierr);
743   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg):%d \n",lu->id.ICNTL(2));CHKERRQ(ierr);
744   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):  %d \n",lu->id.ICNTL(3));CHKERRQ(ierr);
745   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):       %d \n",lu->id.ICNTL(4));CHKERRQ(ierr);
746   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):        %d \n",lu->id.ICNTL(5));CHKERRQ(ierr);
747   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):       %d \n",lu->id.ICNTL(6));CHKERRQ(ierr);
748   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (matrix ordering):         %d \n",lu->id.ICNTL(7));CHKERRQ(ierr);
749   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):       %d \n",lu->id.ICNTL(8));CHKERRQ(ierr);
750   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(9) (A/A^T x=b is solved):     %d \n",lu->id.ICNTL(9));CHKERRQ(ierr);
751   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements): %d \n",lu->id.ICNTL(10));CHKERRQ(ierr);
752   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):         %d \n",lu->id.ICNTL(11));CHKERRQ(ierr);
753   if (!lu->myid && lu->id.ICNTL(11)>0) {
754     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(4) (inf norm of input mat):        %g\n",lu->id.RINFOG(4));CHKERRQ(ierr);
755     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(5) (inf norm of solution):         %g\n",lu->id.RINFOG(5));CHKERRQ(ierr);
756     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(6) (inf norm of residual):         %g\n",lu->id.RINFOG(6));CHKERRQ(ierr);
757     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",lu->id.RINFOG(7),lu->id.RINFOG(8));CHKERRQ(ierr);
758     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(9) (error estimate):               %g \n",lu->id.RINFOG(9));CHKERRQ(ierr);
759     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",lu->id.RINFOG(10),lu->id.RINFOG(11));CHKERRQ(ierr);
760 
761   }
762   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",lu->id.ICNTL(12));CHKERRQ(ierr);
763   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",lu->id.ICNTL(13));CHKERRQ(ierr);
764   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",lu->id.ICNTL(14));CHKERRQ(ierr);
765   /* ICNTL(15-17) not used */
766   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",lu->id.ICNTL(18));CHKERRQ(ierr);
767   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",lu->id.ICNTL(19));CHKERRQ(ierr);
768   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",lu->id.ICNTL(20));CHKERRQ(ierr);
769   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",lu->id.ICNTL(21));CHKERRQ(ierr);
770   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",lu->id.ICNTL(22));CHKERRQ(ierr);
771   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",lu->id.ICNTL(23));CHKERRQ(ierr);
772 
773   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",lu->id.ICNTL(24));CHKERRQ(ierr);
774   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",lu->id.ICNTL(25));CHKERRQ(ierr);
775   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",lu->id.ICNTL(26));CHKERRQ(ierr);
776   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",lu->id.ICNTL(27));CHKERRQ(ierr);
777 
778   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",lu->id.CNTL(1));CHKERRQ(ierr);
779   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",lu->id.CNTL(2));CHKERRQ(ierr);
780   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",lu->id.CNTL(3));CHKERRQ(ierr);
781   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",lu->id.CNTL(4));CHKERRQ(ierr);
782   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",lu->id.CNTL(5));CHKERRQ(ierr);
783 
784   /* infomation local to each processor */
785   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "              RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);}
786   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"              [%d] %g \n",lu->myid,lu->id.RINFO(1));CHKERRQ(ierr);
787   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
788   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "              RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);}
789   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"              [%d]  %g \n",lu->myid,lu->id.RINFO(2));CHKERRQ(ierr);
790   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
791   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "              RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);}
792   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"              [%d]  %g \n",lu->myid,lu->id.RINFO(3));CHKERRQ(ierr);
793   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
794 
795   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "              INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);}
796   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"              [%d] %d \n",lu->myid,lu->id.INFO(15));CHKERRQ(ierr);
797   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
798 
799   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "              INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);}
800   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"              [%d] %d \n",lu->myid,lu->id.INFO(16));CHKERRQ(ierr);
801   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
802 
803   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "              INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);}
804   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"              [%d] %d \n",lu->myid,lu->id.INFO(23));CHKERRQ(ierr);
805   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
806 
807   if (!lu->myid){ /* information from the host */
808     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",lu->id.RINFOG(1));CHKERRQ(ierr);
809     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",lu->id.RINFOG(2));CHKERRQ(ierr);
810     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",lu->id.RINFOG(3));CHKERRQ(ierr);
811 
812     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(3));CHKERRQ(ierr);
813     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(4));CHKERRQ(ierr);
814     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",lu->id.INFOG(5));CHKERRQ(ierr);
815     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",lu->id.INFOG(6));CHKERRQ(ierr);
816     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively uese after analysis): %d \n",lu->id.INFOG(7));CHKERRQ(ierr);
817     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",lu->id.INFOG(8));CHKERRQ(ierr);
818     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",lu->id.INFOG(9));CHKERRQ(ierr);
819     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",lu->id.INFOG(10));CHKERRQ(ierr);
820     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",lu->id.INFOG(11));CHKERRQ(ierr);
821     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",lu->id.INFOG(12));CHKERRQ(ierr);
822     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",lu->id.INFOG(13));CHKERRQ(ierr);
823     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",lu->id.INFOG(14));CHKERRQ(ierr);
824     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",lu->id.INFOG(15));CHKERRQ(ierr);
825     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",lu->id.INFOG(16));CHKERRQ(ierr);
826     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",lu->id.INFOG(17));CHKERRQ(ierr);
827     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",lu->id.INFOG(18));CHKERRQ(ierr);
828     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",lu->id.INFOG(19));CHKERRQ(ierr);
829      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",lu->id.INFOG(20));CHKERRQ(ierr);
830      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",lu->id.INFOG(21));CHKERRQ(ierr);
831      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",lu->id.INFOG(22));CHKERRQ(ierr);
832      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",lu->id.INFOG(23));CHKERRQ(ierr);
833      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",lu->id.INFOG(24));CHKERRQ(ierr);
834      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",lu->id.INFOG(25));CHKERRQ(ierr);
835   }
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "MatView_MUMPS"
841 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
842 {
843   PetscErrorCode    ierr;
844   PetscTruth        iascii;
845   PetscViewerFormat format;
846   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
847 
848   PetscFunctionBegin;
849   /* check if matrix is mumps type */
850   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
851 
852   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
853   if (iascii) {
854     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
855     if (format == PETSC_VIEWER_ASCII_INFO || mumps->mumpsview){
856       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
857       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                  %d \n",mumps->id.sym);CHKERRQ(ierr);
858       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):           %d \n",mumps->id.par);CHKERRQ(ierr);
859       if (mumps->mumpsview){ /* View all MUMPS parameters */
860         ierr = MatFactorInfo_MUMPS(A,viewer);CHKERRQ(ierr);
861       }
862     }
863   }
864   PetscFunctionReturn(0);
865 }
866 
867 #undef __FUNCT__
868 #define __FUNCT__ "MatGetInfo_MUMPS"
869 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
870 {
871   Mat_MUMPS  *mumps =(Mat_MUMPS*)A->spptr;
872 
873   PetscFunctionBegin;
874   info->block_size        = 1.0;
875   info->nz_allocated      = mumps->id.INFOG(20);
876   info->nz_used           = mumps->id.INFOG(20);
877   info->nz_unneeded       = 0.0;
878   info->assemblies        = 0.0;
879   info->mallocs           = 0.0;
880   info->memory            = 0.0;
881   info->fill_ratio_given  = 0;
882   info->fill_ratio_needed = 0;
883   info->factor_mallocs    = 0;
884   PetscFunctionReturn(0);
885 }
886 
887 /*MC
888   MAT_SOLVER_MUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
889   distributed and sequential matrices via the external package MUMPS.
890 
891   Works with MATAIJ and MATSBAIJ matrices
892 
893   Options Database Keys:
894 + -mat_mumps_sym <0,1,2> - 0 the matrix is unsymmetric, 1 symmetric positive definite, 2 symmetric
895 . -mat_mumps_icntl_4 <0,...,4> - print level
896 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
897 . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
898 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
899 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
900 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
901 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
902 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
903 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
904 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
905 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
906 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
907 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
908 
909   Level: beginner
910 
911 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
912 
913 M*/
914 
915 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/*
   Reports the solver package that produced this factor (always
   MAT_SOLVER_MUMPS).  Composed on every MUMPS factor matrix under the name
   "MatFactorGetSolverPackage_C" by the MatGetFactor_*_mumps() routines below.
*/
PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MAT_SOLVER_MUMPS;
  PetscFunctionReturn(0);
}
924 EXTERN_C_END
925 
926 EXTERN_C_BEGIN
927 /*
928     The seq and mpi versions of this function are the same
929 */
930 #undef __FUNCT__
931 #define __FUNCT__ "MatGetFactor_seqaij_mumps"
932 PetscErrorCode MatGetFactor_seqaij_mumps(Mat A,MatFactorType ftype,Mat *F)
933 {
934   Mat            B;
935   PetscErrorCode ierr;
936   Mat_MUMPS      *mumps;
937 
938   PetscFunctionBegin;
939   /* Create the factorization matrix */
940   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
941   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
942   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
943   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
944 
945   B->ops->view             = MatView_MUMPS;
946   B->ops->getinfo          = MatGetInfo_MUMPS;
947   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
948   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);CHKERRQ(ierr);
949   if (ftype == MAT_FACTOR_LU) {
950     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
951     B->factortype = MAT_FACTOR_LU;
952   } else {
953     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJMUMPS;
954     B->factortype = MAT_FACTOR_CHOLESKY;
955   }
956 
957   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
958   mumps->CleanUpMUMPS              = PETSC_FALSE;
959   mumps->isAIJ                     = PETSC_TRUE;
960   mumps->scat_rhs                  = PETSC_NULL;
961   mumps->scat_sol                  = PETSC_NULL;
962   mumps->nSolve                    = 0;
963   mumps->MatDestroy                = B->ops->destroy;
964   B->ops->destroy                  = MatDestroy_MUMPS;
965   B->spptr                         = (void*)mumps;
966 
967   *F = B;
968   PetscFunctionReturn(0);
969 }
970 EXTERN_C_END
971 
972 EXTERN_C_BEGIN
973 #undef __FUNCT__
974 #define __FUNCT__ "MatGetFactor_seqsbaij_mumps"
975 PetscErrorCode MatGetFactor_seqsbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
976 {
977   Mat            B;
978   PetscErrorCode ierr;
979   Mat_MUMPS      *mumps;
980 
981   PetscFunctionBegin;
982   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
983   /* Create the factorization matrix */
984   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
985   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
986   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
987   ierr = MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);CHKERRQ(ierr);
988   ierr = MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
989 
990   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJMUMPS;
991   B->ops->view                   = MatView_MUMPS;
992   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
993   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);CHKERRQ(ierr);
994   B->factortype                   = MAT_FACTOR_CHOLESKY;
995 
996   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
997   mumps->CleanUpMUMPS              = PETSC_FALSE;
998   mumps->isAIJ                     = PETSC_FALSE;
999   mumps->scat_rhs                  = PETSC_NULL;
1000   mumps->scat_sol                  = PETSC_NULL;
1001   mumps->nSolve                    = 0;
1002   mumps->MatDestroy                = B->ops->destroy;
1003   B->ops->destroy                  = MatDestroy_MUMPS;
1004   B->spptr                         = (void*)mumps;
1005 
1006   *F = B;
1007   PetscFunctionReturn(0);
1008 }
1009 EXTERN_C_END
1010 
1011 EXTERN_C_BEGIN
1012 #undef __FUNCT__
1013 #define __FUNCT__ "MatGetFactor_mpisbaij_mumps"
1014 PetscErrorCode MatGetFactor_mpisbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1015 {
1016   Mat            B;
1017   PetscErrorCode ierr;
1018   Mat_MUMPS      *mumps;
1019 
1020   PetscFunctionBegin;
1021   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
1022   /* Create the factorization matrix */
1023   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1024   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1025   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1026   ierr = MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);CHKERRQ(ierr);
1027   ierr = MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1028 
1029   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJMUMPS;
1030   B->ops->view                   = MatView_MUMPS;
1031   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1032   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);CHKERRQ(ierr);
1033   B->factortype                  = MAT_FACTOR_CHOLESKY;
1034 
1035   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
1036   mumps->CleanUpMUMPS              = PETSC_FALSE;
1037   mumps->isAIJ                     = PETSC_FALSE;
1038   mumps->scat_rhs                  = PETSC_NULL;
1039   mumps->scat_sol                  = PETSC_NULL;
1040   mumps->nSolve                    = 0;
1041   mumps->MatDestroy                = B->ops->destroy;
1042   B->ops->destroy                  = MatDestroy_MUMPS;
1043   B->spptr                         = (void*)mumps;
1044 
1045   *F = B;
1046   PetscFunctionReturn(0);
1047 }
1048 EXTERN_C_END
1049 
1050 EXTERN_C_BEGIN
1051 #undef __FUNCT__
1052 #define __FUNCT__ "MatGetFactor_mpiaij_mumps"
1053 PetscErrorCode MatGetFactor_mpiaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1054 {
1055   Mat            B;
1056   PetscErrorCode ierr;
1057   Mat_MUMPS      *mumps;
1058 
1059   PetscFunctionBegin;
1060   /* Create the factorization matrix */
1061   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1062   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1063   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1064   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
1065   ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1066 
1067   if (ftype == MAT_FACTOR_LU) {
1068     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
1069     B->factortype = MAT_FACTOR_LU;
1070   } else {
1071     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJMUMPS;
1072     B->factortype = MAT_FACTOR_CHOLESKY;
1073   }
1074 
1075   B->ops->view             = MatView_MUMPS;
1076   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1077   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);CHKERRQ(ierr);
1078 
1079   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
1080   mumps->CleanUpMUMPS              = PETSC_FALSE;
1081   mumps->isAIJ                     = PETSC_TRUE;
1082   mumps->scat_rhs                  = PETSC_NULL;
1083   mumps->scat_sol                  = PETSC_NULL;
1084   mumps->nSolve                    = 0;
1085   mumps->MatDestroy                = B->ops->destroy;
1086   B->ops->destroy                  = MatDestroy_MUMPS;
1087   B->spptr                         = (void*)mumps;
1088 
1089   *F = B;
1090   PetscFunctionReturn(0);
1091 }
1092 EXTERN_C_END
1093 
1094 EXTERN_C_BEGIN
1095 #undef __FUNCT__
1096 #define __FUNCT__ "MatGetFactor_mpibaij_mumps"
1097 PetscErrorCode MatGetFactor_mpibaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1098 {
1099   Mat            B;
1100   PetscErrorCode ierr;
1101   Mat_MUMPS      *mumps;
1102 
1103   PetscFunctionBegin;
1104   /* Create the factorization matrix */
1105   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1106   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1107   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1108   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
1109   ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1110 
1111   if (ftype == MAT_FACTOR_LU) {
1112     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
1113     B->factortype = MAT_FACTOR_LU;
1114   }
1115   else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cholesky factorization for Petsc BAIJ matrices not supported yet\n");
1116   B->ops->view             = MatView_MUMPS;
1117   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1118   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);CHKERRQ(ierr);
1119 
1120   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
1121   mumps->CleanUpMUMPS              = PETSC_FALSE;
1122   mumps->isAIJ                     = PETSC_TRUE;
1123   mumps->scat_rhs                  = PETSC_NULL;
1124   mumps->scat_sol                  = PETSC_NULL;
1125   mumps->nSolve                    = 0;
1126   mumps->MatDestroy                = B->ops->destroy;
1127   B->ops->destroy                  = MatDestroy_MUMPS;
1128   B->spptr                         = (void*)mumps;
1129 
1130   *F = B;
1131   PetscFunctionReturn(0);
1132 }
1133 EXTERN_C_END
1134 
1135 /* -------------------------------------------------------------------------------------------*/
1136 /*@
1137   MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
1138 
1139    Collective on Mat
1140 
1141    Input Parameters:
1142 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1143 .  idx - index of MUMPS parameter array ICNTL()
-  icntl - value to be assigned to MUMPS ICNTL(idx)
1145 
1146   Options Database:
1147 .   -mat_mumps_icntl_<idx> <icntl>
1148 
1149    Level: beginner
1150 
1151    References: MUMPS Users' Guide
1152 
1153 .seealso: MatGetFactor()
1154 @*/
1155 #undef __FUNCT__
1156 #define __FUNCT__ "MatMumpsSetIcntl"
1157 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt idx,PetscInt icntl)
1158 {
1159   Mat_MUMPS      *lu =(Mat_MUMPS*)(F)->spptr;
1160 
1161   PetscFunctionBegin;
1162   lu->id.ICNTL(idx) = icntl;
1163   PetscFunctionReturn(0);
1164 }
1165 
1166