xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision b7b27f36d7a1fe91f03b1bc30ce7c8eef79f1a09)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 
9 EXTERN_C_BEGIN
10 #if defined(PETSC_USE_COMPLEX)
11 #if defined(PETSC_USE_REAL_SINGLE)
12 #include <cmumps_c.h>
13 #else
14 #include <zmumps_c.h>
15 #endif
16 #else
17 #if defined(PETSC_USE_REAL_SINGLE)
18 #include <smumps_c.h>
19 #else
20 #include <dmumps_c.h>
21 #endif
22 #endif
23 EXTERN_C_END
24 #define JOB_INIT -1
25 #define JOB_FACTSYMBOLIC 1
26 #define JOB_FACTNUMERIC 2
27 #define JOB_SOLVE 3
28 #define JOB_END -2
29 
30 /* calls to MUMPS */
31 #if defined(PETSC_USE_COMPLEX)
32 #if defined(PETSC_USE_REAL_SINGLE)
33 #define PetscMUMPS_c cmumps_c
34 #else
35 #define PetscMUMPS_c zmumps_c
36 #endif
37 #else
38 #if defined(PETSC_USE_REAL_SINGLE)
39 #define PetscMUMPS_c smumps_c
40 #else
41 #define PetscMUMPS_c dmumps_c
42 #endif
43 #endif
44 
45 
46 /* macros s.t. indices match MUMPS documentation */
47 #define ICNTL(I) icntl[(I)-1]
48 #define CNTL(I) cntl[(I)-1]
49 #define INFOG(I) infog[(I)-1]
50 #define INFO(I) info[(I)-1]
51 #define RINFOG(I) rinfog[(I)-1]
52 #define RINFO(I) rinfo[(I)-1]
53 
/* Private data attached to a MUMPS-factored Mat (stored in A->spptr) */
typedef struct {
  /* the MUMPS control structure; its concrete type follows the PETSc scalar/precision build */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;
#else
  ZMUMPS_STRUC_C id;
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;
#else
  DMUMPS_STRUC_C id;
#endif
#endif

  MatStructure matstruc;          /* SAME_NONZERO_PATTERN after a numeric factorization */
  PetscMPIInt  myid,size;         /* rank and size used by the interface */
  PetscInt     *irn,*jcn,nz,sym;  /* matrix triples (row/col indices, count); irn is the single
                                     allocation that also backs jcn (and sometimes val) */
  PetscScalar  *val;              /* triple values; may alias the Mat's own value array */
  MPI_Comm     comm_mumps;        /* communicator handed to MUMPS; freed in MatDestroy_MUMPS */
  VecScatter   scat_rhs, scat_sol; /* scatters: centralize rhs on host / distribute solution back */
  PetscBool    isAIJ,CleanUpMUMPS; /* CleanUpMUMPS: a MUMPS instance exists and needs JOB_END on destroy */
  Vec          b_seq,x_seq;       /* sequential rhs on host; local piece of MUMPS' distributed solution */
  PetscInt     ICNTL9_pre;   /* check if ICNTL(9) is changed from previous MatSolve */

  PetscErrorCode (*Destroy)(Mat);                /* destroy of the underlying matrix type */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); /* format-specific triple conversion */
} Mat_MUMPS;
82 
83 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
84 
85 
86 /* MatConvertToTriples_A_B */
87 /*convert Petsc matrix to triples: row[nz], col[nz], val[nz] */
88 /*
89   input:
90     A       - matrix in aij,baij or sbaij (bs=1) format
91     shift   - 0: C style output triple; 1: Fortran style output triple.
92     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
93               MAT_REUSE_MATRIX:   only the values in v array are updated
94   output:
95     nnz     - dim of r, c, and v (number of local nonzero entries of A)
96     r, c, v - row and col index, matrix values (matrix triples)
97 
98   The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is
  freed with PetscFree(mumps->irn). This is not ideal code, the fact that v is ONLY sometimes part of mumps->irn means
100   that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
101 
102  */
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
106 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
107 {
108   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
109   PetscInt       nz,rnz,i,j;
110   PetscErrorCode ierr;
111   PetscInt       *row,*col;
112   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
113 
114   PetscFunctionBegin;
115   *v=aa->a;
116   if (reuse == MAT_INITIAL_MATRIX) {
117     nz   = aa->nz;
118     ai   = aa->i;
119     aj   = aa->j;
120     *nnz = nz;
121     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
122     col  = row + nz;
123 
124     nz = 0;
125     for (i=0; i<M; i++) {
126       rnz = ai[i+1] - ai[i];
127       ajj = aj + ai[i];
128       for (j=0; j<rnz; j++) {
129         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
130       }
131     }
132     *r = row; *c = col;
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
139 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
140 {
141   Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
142   const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
143   PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
144   PetscErrorCode ierr;
145   PetscInt       *row,*col;
146 
147   PetscFunctionBegin;
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   M = A->rmap->N/bs;
150   *v = aa->a;
151   if (reuse == MAT_INITIAL_MATRIX) {
152     ai   = aa->i; aj = aa->j;
153     nz   = bs2*aa->nz;
154     *nnz = nz;
155     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
156     col  = row + nz;
157 
158     for (i=0; i<M; i++) {
159       ajj = aj + ai[i];
160       rnz = ai[i+1] - ai[i];
161       for (k=0; k<rnz; k++) {
162         for (j=0; j<bs; j++) {
163           for (m=0; m<bs; m++) {
164             row[idx]   = i*bs + m + shift;
165             col[idx++] = bs*(ajj[k]) + j + shift;
166           }
167         }
168       }
169     }
170     *r = row; *c = col;
171   }
172   PetscFunctionReturn(0);
173 }
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
177 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
178 {
179   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
180   PetscInt       nz,rnz,i,j;
181   PetscErrorCode ierr;
182   PetscInt       *row,*col;
183   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
184 
185   PetscFunctionBegin;
186   *v = aa->a;
187   if (reuse == MAT_INITIAL_MATRIX) {
188     nz   = aa->nz;
189     ai   = aa->i;
190     aj   = aa->j;
191     *v   = aa->a;
192     *nnz = nz;
193     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
194     col  = row + nz;
195 
196     nz = 0;
197     for (i=0; i<M; i++) {
198       rnz = ai[i+1] - ai[i];
199       ajj = aj + ai[i];
200       for (j=0; j<rnz; j++) {
201         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
202       }
203     }
204     *r = row; *c = col;
205   }
206   PetscFunctionReturn(0);
207 }
208 
209 #undef __FUNCT__
210 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
211 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
212 {
213   const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
214   PetscInt          nz,rnz,i,j;
215   const PetscScalar *av,*v1;
216   PetscScalar       *val;
217   PetscErrorCode    ierr;
218   PetscInt          *row,*col;
219   Mat_SeqSBAIJ      *aa=(Mat_SeqSBAIJ*)A->data;
220 
221   PetscFunctionBegin;
222   ai   =aa->i; aj=aa->j;av=aa->a;
223   adiag=aa->diag;
224   if (reuse == MAT_INITIAL_MATRIX) {
225     nz   = M + (aa->nz-M)/2;
226     *nnz = nz;
227     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
228     col  = row + nz;
229     val  = (PetscScalar*)(col + nz);
230 
231     nz = 0;
232     for (i=0; i<M; i++) {
233       rnz = ai[i+1] - adiag[i];
234       ajj = aj + adiag[i];
235       v1  = av + adiag[i];
236       for (j=0; j<rnz; j++) {
237         row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
238       }
239     }
240     *r = row; *c = col; *v = val;
241   } else {
242     nz = 0; val = *v;
243     for (i=0; i <M; i++) {
244       rnz = ai[i+1] - adiag[i];
245       ajj = aj + adiag[i];
246       v1  = av + adiag[i];
247       for (j=0; j<rnz; j++) {
248         val[nz++] = v1[j];
249       }
250     }
251   }
252   PetscFunctionReturn(0);
253 }
254 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Convert the local rows of an MPISBAIJ matrix (bs=1) into COO triples for
  MUMPS.  Indices are global; values are COPIED into the triple storage
  (unlike the sequential variants, which share the Mat's value array).
  See MatConvertToTriples_seqaij_seqaij for the argument conventions.
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;   /* local diagonal block */
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;    /* local off-diagonal block */

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray;   /* maps compressed B columns to global column indices */

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* single allocation: row and col indices followed by the values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow: global index of local row i */
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: diagonal-block columns are local, offset by rstart */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: off-diagonal columns are mapped to global indices via garray */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
318 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
319 {
320   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
321   PetscErrorCode    ierr;
322   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
323   PetscInt          *row,*col;
324   const PetscScalar *av, *bv,*v1,*v2;
325   PetscScalar       *val;
326   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
327   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
328   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
329 
330   PetscFunctionBegin;
331   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
332   av=aa->a; bv=bb->a;
333 
334   garray = mat->garray;
335 
336   if (reuse == MAT_INITIAL_MATRIX) {
337     nz   = aa->nz + bb->nz;
338     *nnz = nz;
339     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
340     col  = row + nz;
341     val  = (PetscScalar*)(col + nz);
342 
343     *r = row; *c = col; *v = val;
344   } else {
345     row = *r; col = *c; val = *v;
346   }
347 
348   jj = 0; irow = rstart;
349   for (i=0; i<m; i++) {
350     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
351     countA = ai[i+1] - ai[i];
352     countB = bi[i+1] - bi[i];
353     bjj    = bj + bi[i];
354     v1     = av + ai[i];
355     v2     = bv + bi[i];
356 
357     /* A-part */
358     for (j=0; j<countA; j++) {
359       if (reuse == MAT_INITIAL_MATRIX) {
360         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
361       }
362       val[jj++] = v1[j];
363     }
364 
365     /* B-part */
366     for (j=0; j < countB; j++) {
367       if (reuse == MAT_INITIAL_MATRIX) {
368         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
369       }
370       val[jj++] = v2[j];
371     }
372     irow++;
373   }
374   PetscFunctionReturn(0);
375 }
376 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/*
  Convert the local part of an MPIBAIJ matrix into pointwise COO triples for
  MUMPS.  Each stored bs x bs block of the diagonal (A) and off-diagonal (B)
  parts expands to bs*bs point entries; indices are global, values are copied.
  See MatConvertToTriples_seqaij_seqaij for the argument conventions.
*/
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz);   /* every stored block yields bs2 point entries */
    *nnz = nz;
    /* single allocation: row and col indices followed by the values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow: global point-row of the first row in block row i */
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];   /* values are stored bs2 per block */
    v2     = bv + bs2*bi[i];

    idx = 0;
    /* A-part: expand each diagonal-block entry, column (j) outer, row (n) inner */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part: compressed block columns are mapped to global ones via garray */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
447 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/*
  Extract the upper triangular part of the local rows of an MPIAIJ matrix as
  global COO triples, for a symmetric (sbaij-style) MUMPS factorization.
  Values are copied.  See MatConvertToTriples_seqaij_seqaij for the argument
  conventions.
*/
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;   /* local diagonal block */
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;   /* local off-diagonal block */

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;   /* adiag[i]: position of the diagonal in row i of A */
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        /* B columns lie outside the local diagonal block, so > rstart selects
           those to the right of it (the upper-triangular side) */
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* single allocation: row and col indices followed by the values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow: global index of local row i */
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: from the diagonal to the end of the row */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: keep only columns on the upper-triangular side (same filter as
       in the counting pass above) */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
522 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroy a MUMPS-factored matrix: free the interface's work objects,
  terminate the MUMPS instance (JOB_END), then run the underlying matrix
  type's destroy and clear the composed query/set functions.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
    /* irn is the single allocation that also backs jcn (and sometimes val) */
    ierr = PetscFree(mumps->irn);CHKERRQ(ierr);

    /* shut down the MUMPS instance before freeing the communicator it uses */
    mumps->id.job = JOB_END;
    PetscMUMPS_c(&mumps->id);
    ierr = MPI_Comm_free(&(mumps->comm_mumps));CHKERRQ(ierr);
  }
  if (mumps->Destroy) {
    /* chain to the destroy routine of the underlying matrix format */
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
563 
564 #undef __FUNCT__
565 #define __FUNCT__ "MatSolve_MUMPS"
566 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
567 {
568   Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
569   PetscScalar      *array;
570   Vec              b_seq;
571   IS               is_iden,is_petsc;
572   PetscErrorCode   ierr;
573   PetscInt         i;
574   static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
575 
576   PetscFunctionBegin;
577   ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
578   ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
579   mumps->id.nrhs = 1;
580   b_seq          = mumps->b_seq;
581   if (mumps->size > 1) {
582     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
583     ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
584     ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
585     if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
586   } else {  /* size == 1 */
587     ierr = VecCopy(b,x);CHKERRQ(ierr);
588     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
589   }
590   if (!mumps->myid) { /* define rhs on the host */
591     mumps->id.nrhs = 1;
592 #if defined(PETSC_USE_COMPLEX)
593 #if defined(PETSC_USE_REAL_SINGLE)
594     mumps->id.rhs = (mumps_complex*)array;
595 #else
596     mumps->id.rhs = (mumps_double_complex*)array;
597 #endif
598 #else
599     mumps->id.rhs = array;
600 #endif
601   }
602 
603   /* solve phase */
604   /*-------------*/
605   mumps->id.job = JOB_SOLVE;
606   PetscMUMPS_c(&mumps->id);
607   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
608 
609   if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
610     if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
611       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
612       ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
613     }
614     if (!mumps->scat_sol) { /* create scatter scat_sol */
615       ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
616       for (i=0; i<mumps->id.lsol_loc; i++) {
617         mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
618       }
619       ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
620       ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
621       ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
622       ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);
623 
624       mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
625     }
626 
627     ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
628     ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
629   }
630   PetscFunctionReturn(0);
631 }
632 
633 #undef __FUNCT__
634 #define __FUNCT__ "MatSolveTranspose_MUMPS"
635 PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
636 {
637   Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   mumps->id.ICNTL(9) = 0;
642 
643   ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
644 
645   mumps->id.ICNTL(9) = 1;
646   PetscFunctionReturn(0);
647 }
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "MatMatSolve_MUMPS"
651 PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
652 {
653   PetscErrorCode ierr;
654   PetscBool      flg;
655 
656   PetscFunctionBegin;
657   ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
658   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
659   ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
660   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
661   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatMatSolve_MUMPS() is not implemented yet");
662   PetscFunctionReturn(0);
663 }
664 
665 #if !defined(PETSC_USE_COMPLEX)
666 /*
667   input:
668    F:        numeric factor
669   output:
670    nneg:     total number of negative pivots
671    nzero:    0
672    npos:     (global dimension of F) - nneg
673 */
674 
675 #undef __FUNCT__
676 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
677 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
678 {
679   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
680   PetscErrorCode ierr;
681   PetscMPIInt    size;
682 
683   PetscFunctionBegin;
684   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
685   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
686   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
687   if (nneg) {
688     if (!mumps->myid) {
689       *nneg = mumps->id.INFOG(12);
690     }
691     ierr = MPI_Bcast(nneg,1,MPI_INT,0,mumps->comm_mumps);CHKERRQ(ierr);
692   }
693   if (nzero) *nzero = 0;
694   if (npos)  *npos  = F->rmap->N - (*nneg);
695   PetscFunctionReturn(0);
696 }
697 #endif /* !defined(PETSC_USE_COMPLEX) */

#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Numeric factorization phase (JOB_FACTNUMERIC): refresh the triple values
  from A, hand the values to MUMPS (centralized on the host or distributed,
  depending on ICNTL(18)), run the factorization, and on the first parallel
  factorization set up x_seq/isol_loc for MUMPS' distributed solution.
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* MAT_REUSE_MATRIX: only mumps->val is refreshed; the index arrays from
     the symbolic phase are kept */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) {
    /* centralized matrix input: only the host passes the values */
    if (!mumps->myid) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a = (mumps_complex*)mumps->val;
#else
      mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a = mumps->val;
#endif
    }
  } else {
    /* distributed matrix input: every rank passes its local values */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
    mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
    mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
    mumps->id.a_loc = mumps->val;
#endif
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    /* INFO(1) == -13: allocation failure; INFO(2) carries the size (negated megabytes or bytes) */
    if (mumps->id.INFO(1) == -13) {
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): condition tests ICNTL(16) but the message prints INFOG(16) -- confirm which was intended */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  if (mumps->size > 1) {
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
    /* a refactorization invalidates the old solution scatter/storage */
    if (mumps->scat_sol) {
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
  }
  (F)->assembled      = PETSC_TRUE;
  mumps->matstruc     = SAME_NONZERO_PATTERN;
  mumps->CleanUpMUMPS = PETSC_TRUE;

  if (mumps->size > 1) {
    /* distributed solution */
    if (!mumps->scat_sol) {
      /* Create x_seq=sol_loc for repeated use */
      PetscInt    lsol_loc;
      PetscScalar *sol_loc;

      lsol_loc = mumps->id.INFO(23); /* length of sol_loc */

      ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);

      mumps->id.lsol_loc = lsol_loc;
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.sol_loc = (mumps_complex*)sol_loc;
#else
      mumps->id.sol_loc = (mumps_double_complex*)sol_loc;
#endif
#else
      mumps->id.sol_loc = sol_loc;
#endif
      /* x_seq wraps sol_loc so the solution scatter can read it directly */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
790 
791 /* Sets MUMPS options from the options database */
792 #undef __FUNCT__
793 #define __FUNCT__ "PetscSetMUMPSFromOptions"
794 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
795 {
796   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
797   PetscErrorCode ierr;
798   PetscInt       icntl;
799   PetscBool      flg;
800 
801   PetscFunctionBegin;
802   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
803   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
804   if (flg) mumps->id.ICNTL(1) = icntl;
805   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
806   if (flg) mumps->id.ICNTL(2) = icntl;
807   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
808   if (flg) mumps->id.ICNTL(3) = icntl;
809 
810   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
811   if (flg) mumps->id.ICNTL(4) = icntl;
812   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
813 
814   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permuting and/or scaling the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
815   if (flg) mumps->id.ICNTL(6) = icntl;
816 
817   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
818   if (flg) {
819     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
820     else mumps->id.ICNTL(7) = icntl;
821   }
822 
823   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
824   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
825   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to the linear system solved (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
826   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control: defines the ordering strategy with scaling constraints (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
827   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control: with or without ScaLAPACK","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
828   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
829   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
830 
831   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core facility (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
832   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
833   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
834   if (mumps->id.ICNTL(24)) {
835     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
836   }
837 
838   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computation of a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
839   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): Schur options for right-hand side or solution vector","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
840   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): experimental parameter","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
841   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
842   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
843   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
844   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): factors can be discarded in the solve phase","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
845   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
846 
847   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
848   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
849   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
850   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
851   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
852 
853   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
854   PetscOptionsEnd();
855   PetscFunctionReturn(0);
856 }
857 
858 #undef __FUNCT__
859 #define __FUNCT__ "PetscInitializeMUMPS"
860 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
861 {
862   PetscErrorCode ierr;
863 
864   PetscFunctionBegin;
865   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
866   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
867   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
868 
869   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
870 
871   mumps->id.job = JOB_INIT;
872   mumps->id.par = 1;  /* host participates factorizaton and solve */
873   mumps->id.sym = mumps->sym;
874   PetscMUMPS_c(&mumps->id);
875 
876   mumps->CleanUpMUMPS = PETSC_FALSE;
877   mumps->scat_rhs     = NULL;
878   mumps->scat_sol     = NULL;
879 
880   /* set PETSc-MUMPS default options - override MUMPS default */
881   mumps->id.ICNTL(3) = 0;
882   mumps->id.ICNTL(4) = 0;
883   if (mumps->size == 1) {
884     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
885   } else {
886     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
887     mumps->id.ICNTL(21) = 1;   /* distributed solution */
888   }
889   PetscFunctionReturn(0);
890 }
891 
/* Note: the PETSc r(=c) permutation is used only when mumps->id.ICNTL(7)==1 with
   centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
   MatLUFactorSymbolic_AIJMUMPS - Runs the MUMPS analysis (symbolic factorization)
   phase for an AIJ matrix: converts A to triples, hands the triples to MUMPS in
   either centralized (ICNTL(18)==0) or distributed (ICNTL(18)==3) form, and
   installs the numeric-factor/solve function pointers on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* build the (irn,jcn,val) triple representation MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* numerical values are only needed during analysis when prescaling ICNTL(6)>1 */
      if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        /* NOTE(review): this rank-0 check is redundant -- we are already inside
           the !mumps->myid branch above; harmless but could be removed */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          /* NOTE(review): perm_in is handed to MUMPS via id.perm_in; presumably
             freed in the interface's destroy routine -- confirm, else it leaks */
          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    /* numerical values are only needed during analysis when prescaling ICNTL(6)>1 */
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* the host holds the entire centralized rhs ... */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      /* ... while the other ranks hold empty stubs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = 0;  /* use MatMatSolve_Basic() until mumps supports distributed rhs */
  PetscFunctionReturn(0);
}
988 
/* Note: the PETSc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
   MatLUFactorSymbolic_BAIJMUMPS - Runs the MUMPS analysis (symbolic factorization)
   phase for a BAIJ matrix. Same structure as the AIJ variant except that no
   user-provided ordering (ICNTL(7)==1) path exists here.
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* build the (irn,jcn,val) triple representation MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* numerical values are only needed during analysis when prescaling ICNTL(6)>1 */
      if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    /* numerical values are only needed during analysis when prescaling ICNTL(6)>1 */
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* the host holds the entire centralized rhs; other ranks hold empty stubs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}
1065 
/* Note: the PETSc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   MatCholeskyFactorSymbolic_MUMPS - Runs the MUMPS analysis (symbolic
   factorization) phase for a symmetric (Cholesky) factorization.
   Mirrors the LU variants but installs the Cholesky numeric-factor pointer,
   and uses the same routine for solve and solvetranspose (A symmetric).
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* build the (irn,jcn,val) triple representation MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* numerical values are only needed during analysis when prescaling ICNTL(6)>1 */
      if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    /* numerical values are only needed during analysis when prescaling ICNTL(6)>1 */
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* the host holds the entire centralized rhs; other ranks hold empty stubs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS; /* symmetric factor: transpose solve == solve */
  F->ops->matsolve              = 0; /* use MatMatSolve_Basic() until mumps supports distributed rhs */
#if !defined(PETSC_USE_COMPLEX)
  /* inertia is only meaningful for real symmetric factorizations */
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#else
  F->ops->getinertia = NULL;
#endif
  PetscFunctionReturn(0);
}
1148 
1149 #undef __FUNCT__
1150 #define __FUNCT__ "MatView_MUMPS"
1151 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1152 {
1153   PetscErrorCode    ierr;
1154   PetscBool         iascii;
1155   PetscViewerFormat format;
1156   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1157 
1158   PetscFunctionBegin;
1159   /* check if matrix is mumps type */
1160   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1161 
1162   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1163   if (iascii) {
1164     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1165     if (format == PETSC_VIEWER_ASCII_INFO) {
1166       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1167       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1168       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1169       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1170       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1171       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1172       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1173       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1174       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1175       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1176       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1177       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1178       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1179       if (mumps->id.ICNTL(11)>0) {
1180         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1181         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1182         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1183         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1184         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1185         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1186       }
1187       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1188       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1189       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1190       /* ICNTL(15-17) not used */
1191       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1192       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1193       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1194       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (somumpstion struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1195       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1196       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1197 
1198       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1199       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1200       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1201       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1202       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1203       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1204 
1205       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1206       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1207       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1208 
1209       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1210       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1211       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absomumpste pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1212       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (vamumpse of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1213       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1214 
1215       /* infomation local to each processor */
1216       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1217       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
1218       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1219       ierr = PetscViewerFlush(viewer);
1220       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1221       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1222       ierr = PetscViewerFlush(viewer);
1223       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1224       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1225       ierr = PetscViewerFlush(viewer);
1226 
1227       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1228       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1229       ierr = PetscViewerFlush(viewer);
1230 
1231       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1232       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1233       ierr = PetscViewerFlush(viewer);
1234 
1235       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1236       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1237       ierr = PetscViewerFlush(viewer);
1238       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
1239 
1240       if (!mumps->myid) { /* information from the host */
1241         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1242         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1243         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1244         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1245 
1246         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1247         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1248         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1249         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1250         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1251         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1252         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1253         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1254         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1255         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1256         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1257         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1258         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1259         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1260         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1261         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1262         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1263         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1264         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1265         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1266         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1267         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1268         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1269         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1270         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1271         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1272         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1273         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1274         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1275       }
1276     }
1277   }
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 #undef __FUNCT__
1282 #define __FUNCT__ "MatGetInfo_MUMPS"
1283 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1284 {
1285   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1286 
1287   PetscFunctionBegin;
1288   info->block_size        = 1.0;
1289   info->nz_allocated      = mumps->id.INFOG(20);
1290   info->nz_used           = mumps->id.INFOG(20);
1291   info->nz_unneeded       = 0.0;
1292   info->assemblies        = 0.0;
1293   info->mallocs           = 0.0;
1294   info->memory            = 0.0;
1295   info->fill_ratio_given  = 0;
1296   info->fill_ratio_needed = 0;
1297   info->factor_mallocs    = 0;
1298   PetscFunctionReturn(0);
1299 }
1300 
1301 /* -------------------------------------------------------------------------------------------*/
1302 #undef __FUNCT__
1303 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
1304 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1305 {
1306   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1307 
1308   PetscFunctionBegin;
1309   mumps->id.ICNTL(icntl) = ival;
1310   PetscFunctionReturn(0);
1311 }
1312 
1313 #undef __FUNCT__
1314 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
1315 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
1316 {
1317   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1318 
1319   PetscFunctionBegin;
1320   *ival = mumps->id.ICNTL(icntl);
1321   PetscFunctionReturn(0);
1322 }
1323 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetIcntl"
/*@
  MatMumpsSetIcntl - Set MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array ICNTL()
-  ival - value of MUMPS ICNTL(icntl)

  Options Database:
.   -mat_mumps_icntl_<icntl> <ival>

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveInt(F,ival,3);
  /* PetscTryMethod: silently does nothing if F is not a MUMPS factor matrix */
  ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1355 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetIcntl"
/*@
  MatMumpsGetIcntl - Get MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array ICNTL()

  Output Parameter:
.  ival - value of MUMPS ICNTL(icntl)

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidIntPointer(ival,3);
  /* NOTE(review): PetscTryMethod leaves *ival untouched when F is not a MUMPS
     factor matrix; callers then read an uninitialized value -- consider
     PetscUseMethod (hard error) instead. TODO confirm intended semantics. */
  ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1386 
1387 /* -------------------------------------------------------------------------------------------*/
1388 #undef __FUNCT__
1389 #define __FUNCT__ "MatMumpsSetCntl_MUMPS"
1390 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
1391 {
1392   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1393 
1394   PetscFunctionBegin;
1395   mumps->id.CNTL(icntl) = val;
1396   PetscFunctionReturn(0);
1397 }
1398 
1399 #undef __FUNCT__
1400 #define __FUNCT__ "MatMumpsGetCntl_MUMPS"
1401 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
1402 {
1403   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1404 
1405   PetscFunctionBegin;
1406   *val = mumps->id.CNTL(icntl);
1407   PetscFunctionReturn(0);
1408 }
1409 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetCntl"
/*@
  MatMumpsSetCntl - Set MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array CNTL()
-  val - value of MUMPS CNTL(icntl)

  Options Database:
.   -mat_mumps_cntl_<icntl> <val>

   Notes:
   The call is dispatched through PetscTryMethod(), so it is silently ignored if F was
   not obtained from the MUMPS interface.

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveReal(F,val,3);
  ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1441 
1442 #undef __FUNCT__
1443 #define __FUNCT__ "MatMumpsGetCntl"
1444 /*@
1445   MatMumpsGetCntl - Get MUMPS parameter CNTL()
1446 
1447    Logically Collective on Mat
1448 
1449    Input Parameters:
1450 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1451 -  icntl - index of MUMPS parameter array CNTL()
1452 
1453   Output Parameter:
1454 .  val - value of MUMPS CNTL(icntl)
1455 
1456    Level: beginner
1457 
1458    References: MUMPS Users' Guide
1459 
1460 .seealso: MatGetFactor()
1461 @*/
1462 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
1463 {
1464   PetscErrorCode ierr;
1465 
1466   PetscFunctionBegin;
1467   PetscValidLogicalCollectiveInt(F,icntl,2);
1468   PetscValidRealPointer(val,3);
1469   ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1470   PetscFunctionReturn(0);
1471 }
1472 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetInfo_MUMPS"
/* Plugin implementation behind MatMumpsGetInfo(): copy out the local statistic INFO(icntl).
   NOTE(review): icntl is used unchecked as a 1-based index into id.info[]; the caller must
   pass a valid index (array size depends on the installed mumps_c.h -- confirm there). */
PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *info = mumps->id.INFO(icntl);
  PetscFunctionReturn(0);
}
1483 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetInfog_MUMPS"
/* Plugin implementation behind MatMumpsGetInfog(): copy out the global statistic INFOG(icntl).
   NOTE(review): icntl is used unchecked as a 1-based index into id.infog[]; the caller must
   pass a valid index (array size depends on the installed mumps_c.h -- confirm there). */
PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *infog = mumps->id.INFOG(icntl);
  PetscFunctionReturn(0);
}
1494 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
/* Plugin implementation behind MatMumpsGetRinfo(): copy out the local real statistic RINFO(icntl).
   NOTE(review): icntl is used unchecked as a 1-based index into id.rinfo[]; the caller must
   pass a valid index (array size depends on the installed mumps_c.h -- confirm there). */
PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *rinfo = mumps->id.RINFO(icntl);
  PetscFunctionReturn(0);
}
1505 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
/* Plugin implementation behind MatMumpsGetRinfog(): copy out the global real statistic RINFOG(icntl).
   NOTE(review): icntl is used unchecked as a 1-based index into id.rinfog[]; the caller must
   pass a valid index (array size depends on the installed mumps_c.h -- confirm there). */
PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *rinfog = mumps->id.RINFOG(icntl);
  PetscFunctionReturn(0);
}
1516 
1517 #undef __FUNCT__
1518 #define __FUNCT__ "MatMumpsGetInfo"
1519 /*@
1520   MatMumpsGetInfo - Get MUMPS parameter INFO()
1521 
1522    Logically Collective on Mat
1523 
1524    Input Parameters:
1525 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1526 -  icntl - index of MUMPS parameter array INFO()
1527 
1528   Output Parameter:
1529 .  ival - value of MUMPS INFO(icntl)
1530 
1531    Level: beginner
1532 
1533    References: MUMPS Users' Guide
1534 
1535 .seealso: MatGetFactor()
1536 @*/
1537 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
1538 {
1539   PetscErrorCode ierr;
1540 
1541   PetscFunctionBegin;
1542   PetscValidIntPointer(ival,3);
1543   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1544   PetscFunctionReturn(0);
1545 }
1546 
1547 #undef __FUNCT__
1548 #define __FUNCT__ "MatMumpsGetInfog"
1549 /*@
1550   MatMumpsGetInfog - Get MUMPS parameter INFOG()
1551 
1552    Logically Collective on Mat
1553 
1554    Input Parameters:
1555 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1556 -  icntl - index of MUMPS parameter array INFOG()
1557 
1558   Output Parameter:
1559 .  ival - value of MUMPS INFOG(icntl)
1560 
1561    Level: beginner
1562 
1563    References: MUMPS Users' Guide
1564 
1565 .seealso: MatGetFactor()
1566 @*/
1567 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
1568 {
1569   PetscErrorCode ierr;
1570 
1571   PetscFunctionBegin;
1572   PetscValidIntPointer(ival,3);
1573   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1574   PetscFunctionReturn(0);
1575 }
1576 
1577 #undef __FUNCT__
1578 #define __FUNCT__ "MatMumpsGetRinfo"
1579 /*@
1580   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
1581 
1582    Logically Collective on Mat
1583 
1584    Input Parameters:
1585 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1586 -  icntl - index of MUMPS parameter array RINFO()
1587 
1588   Output Parameter:
1589 .  val - value of MUMPS RINFO(icntl)
1590 
1591    Level: beginner
1592 
1593    References: MUMPS Users' Guide
1594 
1595 .seealso: MatGetFactor()
1596 @*/
1597 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
1598 {
1599   PetscErrorCode ierr;
1600 
1601   PetscFunctionBegin;
1602   PetscValidRealPointer(val,3);
1603   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1604   PetscFunctionReturn(0);
1605 }
1606 
1607 #undef __FUNCT__
1608 #define __FUNCT__ "MatMumpsGetRinfog"
1609 /*@
1610   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
1611 
1612    Logically Collective on Mat
1613 
1614    Input Parameters:
1615 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1616 -  icntl - index of MUMPS parameter array RINFOG()
1617 
1618   Output Parameter:
1619 .  val - value of MUMPS RINFOG(icntl)
1620 
1621    Level: beginner
1622 
1623    References: MUMPS Users' Guide
1624 
1625 .seealso: MatGetFactor()
1626 @*/
1627 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
1628 {
1629   PetscErrorCode ierr;
1630 
1631   PetscFunctionBegin;
1632   PetscValidRealPointer(val,3);
1633   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 /*MC
1638   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
1639   distributed and sequential matrices via the external package MUMPS.
1640 
1641   Works with MATAIJ and MATSBAIJ matrices
1642 
1643   Options Database Keys:
1644 + -mat_mumps_icntl_4 <0,...,4> - print level
1645 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
1646 . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
1647 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
1648 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
1649 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
1650 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
1651 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
1652 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
1653 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
1654 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
1655 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
1656 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
1657 
1658   Level: beginner
1659 
1660 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
1661 
1662 M*/
1663 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Report MATSOLVERMUMPS as the solver package of a MUMPS factor matrix;
   composed on factor matrices as "MatFactorGetSolverPackage_C" */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
1672 
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/* Create the factor matrix F (LU or Cholesky) that will hold the MUMPS factorization of
   the (MPI)AIJ matrix A.  F mirrors A's type and sizes, carries a Mat_MUMPS context in
   spptr, and exposes the MatMumps{Set,Get}* options through composed functions. */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix: same type/layout as A, zero preallocation
     (the factor's storage lives inside MUMPS, not in the PETSc Mat) */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view    = MatView_MUMPS;
  B->ops->getinfo = MatGetInfo_MUMPS;

  /* Make the MUMPS option setters/getters reachable via PetscTryMethod/PetscUseMethod on B */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    /* converters translate the PETSc storage to the triplet (COO) format MUMPS consumes */
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0;                                 /* MUMPS SYM=0: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    /* Cholesky only needs one triangle, so convert AIJ to symmetric (sbaij-style) triples */
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
    if (A->spd_set && A->spd) mumps->sym = 1;       /* MUMPS SYM=1: symmetric positive definite */
    else                      mumps->sym = 2;       /* MUMPS SYM=2: general symmetric */
  }

  /* chain our destroy in front of the type's destroy so MUMPS internals are freed first */
  mumps->isAIJ    = PETSC_TRUE;
  mumps->Destroy  = B->ops->destroy;
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
1735 
1736 /* MatGetFactor for Seq and MPI SBAIJ matrices */
1737 #undef __FUNCT__
1738 #define __FUNCT__ "MatGetFactor_sbaij_mumps"
1739 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1740 {
1741   Mat            B;
1742   PetscErrorCode ierr;
1743   Mat_MUMPS      *mumps;
1744   PetscBool      isSeqSBAIJ;
1745 
1746   PetscFunctionBegin;
1747   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
1748   if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
1749   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
1750   /* Create the factorization matrix */
1751   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1752   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1753   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1754   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1755   if (isSeqSBAIJ) {
1756     ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
1757 
1758     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
1759   } else {
1760     ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
1761 
1762     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
1763   }
1764 
1765   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
1766   B->ops->view                   = MatView_MUMPS;
1767 
1768   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1769   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1770   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1771   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1772   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1773 
1774   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1775   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1776   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1777   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1778 
1779   B->factortype = MAT_FACTOR_CHOLESKY;
1780   if (A->spd_set && A->spd) mumps->sym = 1;
1781   else                      mumps->sym = 2;
1782 
1783   mumps->isAIJ    = PETSC_FALSE;
1784   mumps->Destroy  = B->ops->destroy;
1785   B->ops->destroy = MatDestroy_MUMPS;
1786   B->spptr        = (void*)mumps;
1787 
1788   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1789 
1790   *F = B;
1791   PetscFunctionReturn(0);
1792 }
1793 
1794 #undef __FUNCT__
1795 #define __FUNCT__ "MatGetFactor_baij_mumps"
1796 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
1797 {
1798   Mat            B;
1799   PetscErrorCode ierr;
1800   Mat_MUMPS      *mumps;
1801   PetscBool      isSeqBAIJ;
1802 
1803   PetscFunctionBegin;
1804   /* Create the factorization matrix */
1805   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
1806   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1807   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1808   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1809   if (isSeqBAIJ) {
1810     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
1811   } else {
1812     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
1813   }
1814 
1815   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1816   if (ftype == MAT_FACTOR_LU) {
1817     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
1818     B->factortype            = MAT_FACTOR_LU;
1819     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
1820     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
1821     mumps->sym = 0;
1822   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
1823 
1824   B->ops->view = MatView_MUMPS;
1825 
1826   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1827   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1828   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1829   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1830   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1831 
1832   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1833   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1834   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1835   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1836 
1837   mumps->isAIJ    = PETSC_TRUE;
1838   mumps->Destroy  = B->ops->destroy;
1839   B->ops->destroy = MatDestroy_MUMPS;
1840   B->spptr        = (void*)mumps;
1841 
1842   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1843 
1844   *F = B;
1845   PetscFunctionReturn(0);
1846 }
1847