xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision eb3f19e46b38d5d7d03c8a3cfecdfb705cfcba06)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 
9 EXTERN_C_BEGIN
10 #if defined(PETSC_USE_COMPLEX)
11 #if defined(PETSC_USE_REAL_SINGLE)
12 #include <cmumps_c.h>
13 #else
14 #include <zmumps_c.h>
15 #endif
16 #else
17 #if defined(PETSC_USE_REAL_SINGLE)
18 #include <smumps_c.h>
19 #else
20 #include <dmumps_c.h>
21 #endif
22 #endif
23 EXTERN_C_END
/* MUMPS job codes (value of id.job for each phase) */
#define JOB_INIT -1
#define JOB_FACTSYMBOLIC 1
#define JOB_FACTNUMERIC 2
#define JOB_SOLVE 3
#define JOB_END -2

/* calls to MUMPS: select the precision/scalar-specific entry point */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define PetscMUMPS_c cmumps_c
#else
#define PetscMUMPS_c zmumps_c
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
#define PetscMUMPS_c smumps_c
#else
#define PetscMUMPS_c dmumps_c
#endif
#endif


/* macros s.t. indices match MUMPS documentation (MUMPS arrays are 1-based) */
#define ICNTL(I) icntl[(I)-1]
#define CNTL(I) cntl[(I)-1]
#define INFOG(I) infog[(I)-1]
#define INFO(I) info[(I)-1]
#define RINFOG(I) rinfog[(I)-1]
#define RINFO(I) rinfo[(I)-1]
53 
/* Private context attached to a MUMPS factored matrix (Mat->spptr) */
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;   /* MUMPS control/data structure, single-precision complex */
#else
  ZMUMPS_STRUC_C id;   /* MUMPS control/data structure, double-precision complex */
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;   /* MUMPS control/data structure, single-precision real */
#else
  DMUMPS_STRUC_C id;   /* MUMPS control/data structure, double-precision real */
#endif
#endif

  MatStructure matstruc;           /* whether the nonzero pattern changed since last factorization */
  PetscMPIInt  myid,size;          /* rank and size of the communicator used with MUMPS */
  PetscInt     *irn,*jcn,nz,sym;   /* triples: row indices, col indices, count; sym is the MUMPS SYM flag */
  PetscScalar  *val;               /* values of the triples (may alias or share irn's allocation) */
  MPI_Comm     comm_mumps;         /* duplicated communicator handed to MUMPS */
  VecScatter   scat_rhs, scat_sol; /* scatters for centralized rhs and distributed solution */
  PetscBool    isAIJ,CleanUpMUMPS; /* CleanUpMUMPS: a MUMPS instance exists and JOB_END must run on destroy */
  Vec          b_seq,x_seq;        /* sequential rhs (host) and local-solution work vectors */
  PetscInt     ICNTL9_pre;   /* check if ICNTL(9) is changed from previous MatSolve */

  PetscErrorCode (*Destroy)(Mat);
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);
} Mat_MUMPS;
82 
83 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
84 
85 
86 /* MatConvertToTriples_A_B */
87 /*convert Petsc matrix to triples: row[nz], col[nz], val[nz] */
88 /*
89   input:
90     A       - matrix in aij,baij or sbaij (bs=1) format
91     shift   - 0: C style output triple; 1: Fortran style output triple.
92     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
93               MAT_REUSE_MATRIX:   only the values in v array are updated
94   output:
95     nnz     - dim of r, c, and v (number of local nonzero entries of A)
96     r, c, v - row and col index, matrix values (matrix triples)
97 
  The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is
  freed with PetscFree(mumps->irn); this is not ideal code, since the fact that v is ONLY sometimes part of mumps->irn means
  that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
101 
102  */
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
106 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
107 {
108   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
109   PetscInt       nz,rnz,i,j;
110   PetscErrorCode ierr;
111   PetscInt       *row,*col;
112   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
113 
114   PetscFunctionBegin;
115   *v=aa->a;
116   if (reuse == MAT_INITIAL_MATRIX) {
117     nz   = aa->nz;
118     ai   = aa->i;
119     aj   = aa->j;
120     *nnz = nz;
121     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
122     col  = row + nz;
123 
124     nz = 0;
125     for (i=0; i<M; i++) {
126       rnz = ai[i+1] - ai[i];
127       ajj = aj + ai[i];
128       for (j=0; j<rnz; j++) {
129         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
130       }
131     }
132     *r = row; *c = col;
133   }
134   PetscFunctionReturn(0);
135 }
136 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
/*
  Expand a SeqBAIJ matrix (any block size) into pointwise MUMPS triples.
  The values are aliased from the BAIJ storage (*v = aa->a), so the index
  loops below must enumerate entries in exactly the order the values are
  stored: for each bs x bs block the inner index m (row within block)
  varies fastest -- presumably column-major within each block, matching
  the SeqBAIJ value layout; TODO confirm against SeqBAIJ documentation.
*/
PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
  const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
  PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
  PetscErrorCode ierr;
  PetscInt       *row,*col;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  M = A->rmap->N/bs;                    /* number of block rows (sequential: N == n) */
  *v = aa->a;                           /* values are used in storage order; no copy */
  if (reuse == MAT_INITIAL_MATRIX) {
    ai   = aa->i; aj = aa->j;
    nz   = bs2*aa->nz;                  /* pointwise nonzeros = blocks * bs^2 */
    *nnz = nz;
    /* single allocation: row indices followed by column indices */
    ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
    col  = row + nz;

    for (i=0; i<M; i++) {
      ajj = aj + ai[i];                 /* block-column indices of block row i */
      rnz = ai[i+1] - ai[i];            /* number of blocks in this block row */
      for (k=0; k<rnz; k++) {
        for (j=0; j<bs; j++) {
          for (m=0; m<bs; m++) {
            row[idx]   = i*bs + m + shift;
            col[idx++] = bs*(ajj[k]) + j + shift;
          }
        }
      }
    }
    *r = row; *c = col;
  }
  PetscFunctionReturn(0);
}
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
177 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
178 {
179   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
180   PetscInt       nz,rnz,i,j;
181   PetscErrorCode ierr;
182   PetscInt       *row,*col;
183   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
184 
185   PetscFunctionBegin;
186   *v = aa->a;
187   if (reuse == MAT_INITIAL_MATRIX) {
188     nz   = aa->nz;
189     ai   = aa->i;
190     aj   = aa->j;
191     *v   = aa->a;
192     *nnz = nz;
193     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
194     col  = row + nz;
195 
196     nz = 0;
197     for (i=0; i<M; i++) {
198       rnz = ai[i+1] - ai[i];
199       ajj = aj + ai[i];
200       for (j=0; j<rnz; j++) {
201         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
202       }
203     }
204     *r = row; *c = col;
205   }
206   PetscFunctionReturn(0);
207 }
208 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
/*
  Extract the upper triangular part (diagonal included) of a SeqAIJ matrix
  as MUMPS triples for a symmetric factorization.  Because only part of the
  AIJ values is passed, the values must be copied: one allocation holds row
  indices, column indices AND values, and the MAT_REUSE_MATRIX branch
  refreshes the copied values in place with the same traversal.
  Relies on aa->diag pointing at the diagonal entry of each row -- assumes
  every diagonal entry is present; TODO confirm callers guarantee this.
*/
PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
  PetscInt          nz,rnz,i,j;
  const PetscScalar *av,*v1;
  PetscScalar       *val;
  PetscErrorCode    ierr;
  PetscInt          *row,*col;
  Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;

  PetscFunctionBegin;
  ai   =aa->i; aj=aa->j;av=aa->a;
  adiag=aa->diag;                      /* adiag[i]: position of row i's diagonal entry */
  if (reuse == MAT_INITIAL_MATRIX) {
    /* count nz in the upper triangular part of A */
    nz = 0;
    for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
    *nnz = nz;

    /* one allocation: rows, cols, then values; freed as a unit via *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - adiag[i];        /* entries from the diagonal to the end of row i */
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
      }
    }
    *r = row; *c = col; *v = val;
  } else {
    /* same traversal as above, copying values only */
    nz = 0; val = *v;
    for (i=0; i <M; i++) {
      rnz = ai[i+1] - adiag[i];
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        val[nz++] = v1[j];
      }
    }
  }
  PetscFunctionReturn(0);
}
257 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Convert the local rows of an MPISBAIJ (bs=1) matrix into MUMPS triples with
  global indices.  Both the diagonal block (SBAIJ: upper triangle only) and
  the off-diagonal block are emitted.  Values are copied into the single
  allocation; with MAT_REUSE_MATRIX only the values are rewritten (indices
  are unchanged because the nonzero pattern is assumed the same).
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray;   /* maps off-diagonal compressed column index -> global column */

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* one allocation: rows, cols, then values; freed as a unit via *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow: global row index of local row i */
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: diagonal-block columns, offset by rstart to become global */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: off-diagonal columns, mapped to global indices via garray */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
318 
319 #undef __FUNCT__
320 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
321 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
322 {
323   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
324   PetscErrorCode    ierr;
325   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
326   PetscInt          *row,*col;
327   const PetscScalar *av, *bv,*v1,*v2;
328   PetscScalar       *val;
329   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
330   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
331   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
332 
333   PetscFunctionBegin;
334   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
335   av=aa->a; bv=bb->a;
336 
337   garray = mat->garray;
338 
339   if (reuse == MAT_INITIAL_MATRIX) {
340     nz   = aa->nz + bb->nz;
341     *nnz = nz;
342     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
343     col  = row + nz;
344     val  = (PetscScalar*)(col + nz);
345 
346     *r = row; *c = col; *v = val;
347   } else {
348     row = *r; col = *c; val = *v;
349   }
350 
351   jj = 0; irow = rstart;
352   for (i=0; i<m; i++) {
353     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
354     countA = ai[i+1] - ai[i];
355     countB = bi[i+1] - bi[i];
356     bjj    = bj + bi[i];
357     v1     = av + ai[i];
358     v2     = bv + bi[i];
359 
360     /* A-part */
361     for (j=0; j<countA; j++) {
362       if (reuse == MAT_INITIAL_MATRIX) {
363         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
364       }
365       val[jj++] = v1[j];
366     }
367 
368     /* B-part */
369     for (j=0; j < countB; j++) {
370       if (reuse == MAT_INITIAL_MATRIX) {
371         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
372       }
373       val[jj++] = v2[j];
374     }
375     irow++;
376   }
377   PetscFunctionReturn(0);
378 }
379 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/*
  Expand the local rows of an MPIBAIJ matrix into pointwise MUMPS triples
  with global indices.  Each bs x bs block of the diagonal (A) and
  off-diagonal (B) parts is unrolled; values are copied in storage order,
  so the (j,n) loop nesting must match the BAIJ per-block value layout --
  presumably column-major within each block; TODO confirm against BAIJ docs.
  With MAT_REUSE_MATRIX only the values are rewritten.
*/
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz);   /* pointwise nonzeros = blocks * bs^2 */
    *nnz = nz;
    /* one allocation: rows, cols, then values; freed as a unit via *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow: global index of the first point row of block row i */
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];        /* values of block row i, A part */
    v2     = bv + bs2*bi[i];        /* values of block row i, B part */

    idx = 0;
    /* A-part: local block columns, offset by rstart to become global */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part: off-diagonal block columns, mapped to global via garray */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
450 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/*
  Extract the upper triangular part (diagonal included) of the local rows of
  an MPIAIJ matrix as MUMPS triples with global indices, for a symmetric
  factorization.  Diagonal-block entries are taken from the diagonal onward
  (via aa->diag); off-diagonal entries are kept only when garray[...] >
  rstart, which here selects exactly the columns >= rstart+m (B holds no
  columns inside [rstart, rstart+m), so '>' behaves like 'past the local
  diagonal block') -- NOTE(review): verify this filtering against the
  intended upper-triangle definition.  Values are copied; MAT_REUSE_MATRIX
  rewrites values only, using the identical traversal.
*/
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* one allocation: rows, cols, then values; freed as a unit via *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow: global row index */
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: diagonal and above, offset by rstart to become global */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: keep only columns past the local block (same filter as the count above) */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
525 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroy a MUMPS factored matrix: free the interface work objects,
  terminate the MUMPS instance (JOB_END), invoke the base matrix type's
  destroy routine, and remove the composed set/get functions.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->CleanUpMUMPS) {   /* only set once a factorization has been performed */
    /* Terminate instance, deallocate memories */
    ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
    ierr = PetscFree(mumps->irn);CHKERRQ(ierr);   /* jcn (and sometimes val) share this allocation */

    mumps->id.job = JOB_END;
    PetscMUMPS_c(&mumps->id);   /* NOTE(review): MUMPS termination status (INFOG) not checked here */
    ierr = MPI_Comm_free(&(mumps->comm_mumps));CHKERRQ(ierr);
  }
  if (mumps->Destroy) {
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);     /* destroy routine of the underlying matrix type */
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "MatSolve_MUMPS"
569 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
570 {
571   Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
572   PetscScalar      *array;
573   Vec              b_seq;
574   IS               is_iden,is_petsc;
575   PetscErrorCode   ierr;
576   PetscInt         i;
577   static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
578 
579   PetscFunctionBegin;
580   ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
581   ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
582   mumps->id.nrhs = 1;
583   b_seq          = mumps->b_seq;
584   if (mumps->size > 1) {
585     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
586     ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
587     ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
588     if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
589   } else {  /* size == 1 */
590     ierr = VecCopy(b,x);CHKERRQ(ierr);
591     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
592   }
593   if (!mumps->myid) { /* define rhs on the host */
594     mumps->id.nrhs = 1;
595 #if defined(PETSC_USE_COMPLEX)
596 #if defined(PETSC_USE_REAL_SINGLE)
597     mumps->id.rhs = (mumps_complex*)array;
598 #else
599     mumps->id.rhs = (mumps_double_complex*)array;
600 #endif
601 #else
602     mumps->id.rhs = array;
603 #endif
604   }
605 
606   /* solve phase */
607   /*-------------*/
608   mumps->id.job = JOB_SOLVE;
609   PetscMUMPS_c(&mumps->id);
610   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
611 
612   if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
613     if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
614       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
615       ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
616     }
617     if (!mumps->scat_sol) { /* create scatter scat_sol */
618       ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
619       for (i=0; i<mumps->id.lsol_loc; i++) {
620         mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
621       }
622       ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
623       ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
624       ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
625       ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);
626 
627       mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
628     }
629 
630     ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
631     ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
632   }
633   PetscFunctionReturn(0);
634 }
635 
636 #undef __FUNCT__
637 #define __FUNCT__ "MatSolveTranspose_MUMPS"
638 PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
639 {
640   Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
641   PetscErrorCode ierr;
642 
643   PetscFunctionBegin;
644   mumps->id.ICNTL(9) = 0;
645 
646   ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
647 
648   mumps->id.ICNTL(9) = 1;
649   PetscFunctionReturn(0);
650 }
651 
652 #undef __FUNCT__
653 #define __FUNCT__ "MatMatSolve_MUMPS"
654 PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
655 {
656   PetscErrorCode ierr;
657   PetscBool      flg;
658 
659   PetscFunctionBegin;
660   ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
661   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
662   ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
663   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
664   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatMatSolve_MUMPS() is not implemented yet");
665   PetscFunctionReturn(0);
666 }
667 
668 #if !defined(PETSC_USE_COMPLEX)
669 /*
670   input:
671    F:        numeric factor
672   output:
673    nneg:     total number of negative pivots
674    nzero:    0
675    npos:     (global dimension of F) - nneg
676 */
677 
678 #undef __FUNCT__
679 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
680 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
681 {
682   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
683   PetscErrorCode ierr;
684   PetscMPIInt    size;
685 
686   PetscFunctionBegin;
687   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
688   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
689   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
690 
691   if (nneg) *nneg = mumps->id.INFOG(12);
692   if (nzero || npos) {
693     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
694     if (nzero) *nzero = mumps->id.INFOG(28);
695     if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
696   }
697   PetscFunctionReturn(0);
698 }
699 #endif /* !defined(PETSC_USE_COMPLEX) */
700 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Numerical factorization phase (JOB_FACTNUMERIC).  Refreshes the matrix
  values in the stored triples, hands them to MUMPS (centralized on the
  host when ICNTL(18)==0, distributed otherwise), runs the factorization,
  and in parallel (re)creates the distributed-solution work vector x_seq
  used later by MatSolve_MUMPS.
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* values only: the triple structure was built earlier (MAT_REUSE_MATRIX) */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* ICNTL(18)==0: matrix entries centralized on the host */
    if (!mumps->myid) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a = (mumps_complex*)mumps->val;
#else
      mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a = mumps->val;
#endif
    }
  } else { /* distributed matrix entry */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
    mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
    mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
    mumps->id.a_loc = mumps->val;
#endif
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    if (mumps->id.INFO(1) == -13) { /* -13: allocation failure; INFO(2) carries the size (negative => megabytes) */
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): condition reads ICNTL(16) but message prints INFOG(16) -- confirm this mismatch is intended */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  (F)->assembled      = PETSC_TRUE;
  mumps->matstruc     = SAME_NONZERO_PATTERN;
  mumps->CleanUpMUMPS = PETSC_TRUE;   /* a MUMPS instance now exists; MatDestroy must call JOB_END */

  if (mumps->size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {   /* refactorization: drop the work objects from the previous factorization */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
    mumps->id.sol_loc = (mumps_complex*)sol_loc;
#else
    mumps->id.sol_loc = (mumps_double_complex*)sol_loc;
#endif
#else
    mumps->id.sol_loc = sol_loc;
#endif
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
786 
787 /* Sets MUMPS options from the options database */
788 #undef __FUNCT__
789 #define __FUNCT__ "PetscSetMUMPSFromOptions"
790 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
791 {
792   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
793   PetscErrorCode ierr;
794   PetscInt       icntl;
795   PetscBool      flg;
796 
797   PetscFunctionBegin;
798   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
799   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
800   if (flg) mumps->id.ICNTL(1) = icntl;
801   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
802   if (flg) mumps->id.ICNTL(2) = icntl;
803   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
804   if (flg) mumps->id.ICNTL(3) = icntl;
805 
806   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
807   if (flg) mumps->id.ICNTL(4) = icntl;
808   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
809 
810   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permuting and/or scaling the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
811   if (flg) mumps->id.ICNTL(6) = icntl;
812 
813   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
814   if (flg) {
815     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
816     else mumps->id.ICNTL(7) = icntl;
817   }
818 
819   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
820   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
821   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to the linear system solved (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
822   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control: defines the ordering strategy with scaling constraints (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
823   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control: with or without ScaLAPACK","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
824   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
825   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
826 
827   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core facility (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
828   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
829   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
830   if (mumps->id.ICNTL(24)) {
831     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
832   }
833 
834   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computation of a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
835   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): Schur options for right-hand side or solution vector","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
836   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): experimental parameter","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
837   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
838   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
839   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
840   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): factors can be discarded in the solve phase","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
841   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
842 
843   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
844   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
845   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
846   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
847   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
848 
849   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
850   PetscOptionsEnd();
851   PetscFunctionReturn(0);
852 }
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "PetscInitializeMUMPS"
856 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
857 {
858   PetscErrorCode ierr;
859 
860   PetscFunctionBegin;
861   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
862   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
863   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
864 
865   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
866 
867   mumps->id.job = JOB_INIT;
868   mumps->id.par = 1;  /* host participates factorizaton and solve */
869   mumps->id.sym = mumps->sym;
870   PetscMUMPS_c(&mumps->id);
871 
872   mumps->CleanUpMUMPS = PETSC_FALSE;
873   mumps->scat_rhs     = NULL;
874   mumps->scat_sol     = NULL;
875 
876   /* set PETSc-MUMPS default options - override MUMPS default */
877   mumps->id.ICNTL(3) = 0;
878   mumps->id.ICNTL(4) = 0;
879   if (mumps->size == 1) {
880     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
881   } else {
882     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
883     mumps->id.ICNTL(21) = 1;   /* distributed solution */
884   }
885   PetscFunctionReturn(0);
886 }
887 
/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
   MatLUFactorSymbolic_AIJMUMPS - Runs the MUMPS analysis (symbolic
   factorization) phase on an AIJ matrix: converts A to COO triples,
   hands the structure to MUMPS according to ICNTL(18) (centralized vs
   distributed input), and installs the numeric/solve function pointers on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;  /* global number of rows */

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A to the triple (COO) format MUMPS consumes; second arg 1 requests 1-based (Fortran) indices */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {  /* only the host supplies the matrix */
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* ICNTL(6)>1: permutation/scaling needs numerical values already during analysis */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        if (!mumps->myid) { /* NOTE(review): redundant check -- we are already inside the !myid branch */
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          /* NOTE(review): perm_in is handed to MUMPS; presumably released during factor cleanup -- confirm it is freed at JOB_END */
          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      /* as above: analysis-time scaling/permutation needs values */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host holds the full rhs; all other ranks contribute through the scatter */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);  /* run the MUMPS analysis phase */
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = 0;  /* use MatMatSolve_Basic() until mumps supports distributed rhs */
  PetscFunctionReturn(0);
}
984 
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
   MatLUFactorSymbolic_BAIJMUMPS - MUMPS analysis phase for BAIJ matrices.
   Same flow as MatLUFactorSymbolic_AIJMUMPS except that no user ordering
   (ICNTL(7)==1) path exists here; r and c are ignored.
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;  /* global number of rows */

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A to COO triples with 1-based (Fortran) indices */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {  /* only the host supplies the matrix */
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* ICNTL(6)>1: permutation/scaling needs numerical values during analysis */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host holds the full rhs; other ranks hold empty sequential vectors */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);  /* run the MUMPS analysis phase */
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}
1061 
/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   MatCholeskyFactorSymbolic_MUMPS - MUMPS analysis phase for symmetric
   (Cholesky/LDL^T) factorization. Same flow as the LU analysis routines;
   additionally installs MatGetInertia_SBAIJMUMPS in the real case.
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;  /* global number of rows */

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A to COO triples with 1-based (Fortran) indices */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {  /* only the host supplies the matrix */
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* ICNTL(6)>1: permutation/scaling needs numerical values during analysis */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host holds the full rhs; other ranks hold empty sequential vectors */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);  /* run the MUMPS analysis phase */
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS;  /* symmetric factor: transpose solve == solve */
  F->ops->matsolve              = 0; /* use MatMatSolve_Basic() until mumps supports distributed rhs */
#if !defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#else
  F->ops->getinertia = NULL;  /* inertia is only meaningful for real symmetric matrices */
#endif
  PetscFunctionReturn(0);
}
1144 
1145 #undef __FUNCT__
1146 #define __FUNCT__ "MatView_MUMPS"
1147 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1148 {
1149   PetscErrorCode    ierr;
1150   PetscBool         iascii;
1151   PetscViewerFormat format;
1152   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1153 
1154   PetscFunctionBegin;
1155   /* check if matrix is mumps type */
1156   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1157 
1158   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1159   if (iascii) {
1160     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1161     if (format == PETSC_VIEWER_ASCII_INFO) {
1162       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1163       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1164       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1165       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1166       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1167       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1168       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1169       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1170       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1171       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1172       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1173       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1174       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1175       if (mumps->id.ICNTL(11)>0) {
1176         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1177         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1178         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1179         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1180         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1181         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1182       }
1183       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1184       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1185       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1186       /* ICNTL(15-17) not used */
1187       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1188       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1189       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1190       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (somumpstion struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1191       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1192       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1193 
1194       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1195       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1196       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1197       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1198       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1199       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1200 
1201       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1202       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1203       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1204 
1205       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1206       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1207       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absomumpste pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1208       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (vamumpse of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1209       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1210 
1211       /* infomation local to each processor */
1212       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1213       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
1214       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1215       ierr = PetscViewerFlush(viewer);
1216       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1217       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1218       ierr = PetscViewerFlush(viewer);
1219       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1220       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1221       ierr = PetscViewerFlush(viewer);
1222 
1223       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1224       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1225       ierr = PetscViewerFlush(viewer);
1226 
1227       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1228       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1229       ierr = PetscViewerFlush(viewer);
1230 
1231       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1232       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1233       ierr = PetscViewerFlush(viewer);
1234       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
1235 
1236       if (!mumps->myid) { /* information from the host */
1237         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1238         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1239         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1240         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1241 
1242         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1243         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1244         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1245         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1246         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1247         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1248         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1249         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1250         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1251         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1252         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1253         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1254         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1255         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1256         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1257         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1258         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1259         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1260         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1261         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1262         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1263         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1264         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1265         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1266         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1267         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1268         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1269         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1270         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1271       }
1272     }
1273   }
1274   PetscFunctionReturn(0);
1275 }
1276 
1277 #undef __FUNCT__
1278 #define __FUNCT__ "MatGetInfo_MUMPS"
1279 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1280 {
1281   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1282 
1283   PetscFunctionBegin;
1284   info->block_size        = 1.0;
1285   info->nz_allocated      = mumps->id.INFOG(20);
1286   info->nz_used           = mumps->id.INFOG(20);
1287   info->nz_unneeded       = 0.0;
1288   info->assemblies        = 0.0;
1289   info->mallocs           = 0.0;
1290   info->memory            = 0.0;
1291   info->fill_ratio_given  = 0;
1292   info->fill_ratio_needed = 0;
1293   info->factor_mallocs    = 0;
1294   PetscFunctionReturn(0);
1295 }
1296 
1297 /* -------------------------------------------------------------------------------------------*/
1298 #undef __FUNCT__
1299 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
1300 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1301 {
1302   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1303 
1304   PetscFunctionBegin;
1305   mumps->id.ICNTL(icntl) = ival;
1306   PetscFunctionReturn(0);
1307 }
1308 
1309 #undef __FUNCT__
1310 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
1311 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
1312 {
1313   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1314 
1315   PetscFunctionBegin;
1316   *ival = mumps->id.ICNTL(icntl);
1317   PetscFunctionReturn(0);
1318 }
1319 
1320 #undef __FUNCT__
1321 #define __FUNCT__ "MatMumpsSetIcntl"
1322 /*@
1323   MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
1324 
1325    Logically Collective on Mat
1326 
1327    Input Parameters:
1328 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1329 .  icntl - index of MUMPS parameter array ICNTL()
1330 -  ival - value of MUMPS ICNTL(icntl)
1331 
1332   Options Database:
1333 .   -mat_mumps_icntl_<icntl> <ival>
1334 
1335    Level: beginner
1336 
1337    References: MUMPS Users' Guide
1338 
1339 .seealso: MatGetFactor()
1340 @*/
1341 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
1342 {
1343   PetscErrorCode ierr;
1344 
1345   PetscFunctionBegin;
1346   PetscValidLogicalCollectiveInt(F,icntl,2);
1347   PetscValidLogicalCollectiveInt(F,ival,3);
1348   ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
1349   PetscFunctionReturn(0);
1350 }
1351 
1352 #undef __FUNCT__
1353 #define __FUNCT__ "MatMumpsGetIcntl"
1354 /*@
1355   MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
1356 
1357    Logically Collective on Mat
1358 
1359    Input Parameters:
1360 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1361 -  icntl - index of MUMPS parameter array ICNTL()
1362 
1363   Output Parameter:
1364 .  ival - value of MUMPS ICNTL(icntl)
1365 
1366    Level: beginner
1367 
1368    References: MUMPS Users' Guide
1369 
1370 .seealso: MatGetFactor()
1371 @*/
1372 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
1373 {
1374   PetscErrorCode ierr;
1375 
1376   PetscFunctionBegin;
1377   PetscValidLogicalCollectiveInt(F,icntl,2);
1378   PetscValidIntPointer(ival,3);
1379   ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1380   PetscFunctionReturn(0);
1381 }
1382 
1383 /* -------------------------------------------------------------------------------------------*/
1384 #undef __FUNCT__
1385 #define __FUNCT__ "MatMumpsSetCntl_MUMPS"
1386 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
1387 {
1388   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1389 
1390   PetscFunctionBegin;
1391   mumps->id.CNTL(icntl) = val;
1392   PetscFunctionReturn(0);
1393 }
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "MatMumpsGetCntl_MUMPS"
1397 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
1398 {
1399   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1400 
1401   PetscFunctionBegin;
1402   *val = mumps->id.CNTL(icntl);
1403   PetscFunctionReturn(0);
1404 }
1405 
1406 #undef __FUNCT__
1407 #define __FUNCT__ "MatMumpsSetCntl"
1408 /*@
1409   MatMumpsSetCntl - Set MUMPS parameter CNTL()
1410 
1411    Logically Collective on Mat
1412 
1413    Input Parameters:
1414 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1415 .  icntl - index of MUMPS parameter array CNTL()
1416 -  val - value of MUMPS CNTL(icntl)
1417 
1418   Options Database:
1419 .   -mat_mumps_cntl_<icntl> <val>
1420 
1421    Level: beginner
1422 
1423    References: MUMPS Users' Guide
1424 
1425 .seealso: MatGetFactor()
1426 @*/
1427 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
1428 {
1429   PetscErrorCode ierr;
1430 
1431   PetscFunctionBegin;
1432   PetscValidLogicalCollectiveInt(F,icntl,2);
1433   PetscValidLogicalCollectiveReal(F,val,3);
1434   ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
1435   PetscFunctionReturn(0);
1436 }
1437 
1438 #undef __FUNCT__
1439 #define __FUNCT__ "MatMumpsGetCntl"
1440 /*@
1441   MatMumpsGetCntl - Get MUMPS parameter CNTL()
1442 
1443    Logically Collective on Mat
1444 
1445    Input Parameters:
1446 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1447 -  icntl - index of MUMPS parameter array CNTL()
1448 
1449   Output Parameter:
1450 .  val - value of MUMPS CNTL(icntl)
1451 
1452    Level: beginner
1453 
1454    References: MUMPS Users' Guide
1455 
1456 .seealso: MatGetFactor()
1457 @*/
1458 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
1459 {
1460   PetscErrorCode ierr;
1461 
1462   PetscFunctionBegin;
1463   PetscValidLogicalCollectiveInt(F,icntl,2);
1464   PetscValidRealPointer(val,3);
1465   ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 #undef __FUNCT__
1470 #define __FUNCT__ "MatMumpsGetInfo_MUMPS"
1471 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
1472 {
1473   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1474 
1475   PetscFunctionBegin;
1476   *info = mumps->id.INFO(icntl);
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 #undef __FUNCT__
1481 #define __FUNCT__ "MatMumpsGetInfog_MUMPS"
1482 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
1483 {
1484   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1485 
1486   PetscFunctionBegin;
1487   *infog = mumps->id.INFOG(icntl);
1488   PetscFunctionReturn(0);
1489 }
1490 
1491 #undef __FUNCT__
1492 #define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
1493 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
1494 {
1495   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1496 
1497   PetscFunctionBegin;
1498   *rinfo = mumps->id.RINFO(icntl);
1499   PetscFunctionReturn(0);
1500 }
1501 
1502 #undef __FUNCT__
1503 #define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
1504 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
1505 {
1506   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1507 
1508   PetscFunctionBegin;
1509   *rinfog = mumps->id.RINFOG(icntl);
1510   PetscFunctionReturn(0);
1511 }
1512 
1513 #undef __FUNCT__
1514 #define __FUNCT__ "MatMumpsGetInfo"
1515 /*@
1516   MatMumpsGetInfo - Get MUMPS parameter INFO()
1517 
1518    Logically Collective on Mat
1519 
1520    Input Parameters:
1521 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1522 -  icntl - index of MUMPS parameter array INFO()
1523 
1524   Output Parameter:
1525 .  ival - value of MUMPS INFO(icntl)
1526 
1527    Level: beginner
1528 
1529    References: MUMPS Users' Guide
1530 
1531 .seealso: MatGetFactor()
1532 @*/
1533 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
1534 {
1535   PetscErrorCode ierr;
1536 
1537   PetscFunctionBegin;
1538   PetscValidIntPointer(ival,3);
1539   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1540   PetscFunctionReturn(0);
1541 }
1542 
1543 #undef __FUNCT__
1544 #define __FUNCT__ "MatMumpsGetInfog"
1545 /*@
1546   MatMumpsGetInfog - Get MUMPS parameter INFOG()
1547 
1548    Logically Collective on Mat
1549 
1550    Input Parameters:
1551 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1552 -  icntl - index of MUMPS parameter array INFOG()
1553 
1554   Output Parameter:
1555 .  ival - value of MUMPS INFOG(icntl)
1556 
1557    Level: beginner
1558 
1559    References: MUMPS Users' Guide
1560 
1561 .seealso: MatGetFactor()
1562 @*/
1563 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
1564 {
1565   PetscErrorCode ierr;
1566 
1567   PetscFunctionBegin;
1568   PetscValidIntPointer(ival,3);
1569   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1570   PetscFunctionReturn(0);
1571 }
1572 
1573 #undef __FUNCT__
1574 #define __FUNCT__ "MatMumpsGetRinfo"
1575 /*@
1576   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
1577 
1578    Logically Collective on Mat
1579 
1580    Input Parameters:
1581 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1582 -  icntl - index of MUMPS parameter array RINFO()
1583 
1584   Output Parameter:
1585 .  val - value of MUMPS RINFO(icntl)
1586 
1587    Level: beginner
1588 
1589    References: MUMPS Users' Guide
1590 
1591 .seealso: MatGetFactor()
1592 @*/
1593 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
1594 {
1595   PetscErrorCode ierr;
1596 
1597   PetscFunctionBegin;
1598   PetscValidRealPointer(val,3);
1599   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1600   PetscFunctionReturn(0);
1601 }
1602 
1603 #undef __FUNCT__
1604 #define __FUNCT__ "MatMumpsGetRinfog"
1605 /*@
1606   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
1607 
1608    Logically Collective on Mat
1609 
1610    Input Parameters:
1611 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1612 -  icntl - index of MUMPS parameter array RINFOG()
1613 
1614   Output Parameter:
1615 .  val - value of MUMPS RINFOG(icntl)
1616 
1617    Level: beginner
1618 
1619    References: MUMPS Users' Guide
1620 
1621 .seealso: MatGetFactor()
1622 @*/
1623 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
1624 {
1625   PetscErrorCode ierr;
1626 
1627   PetscFunctionBegin;
1628   PetscValidRealPointer(val,3);
1629   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1630   PetscFunctionReturn(0);
1631 }
1632 
1633 /*MC
1634   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
1635   distributed and sequential matrices via the external package MUMPS.
1636 
1637   Works with MATAIJ and MATSBAIJ matrices
1638 
1639   Options Database Keys:
1640 + -mat_mumps_icntl_4 <0,...,4> - print level
1641 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
. -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
1643 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
1644 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
1645 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
1646 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
1647 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
1648 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
1649 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
1650 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
1651 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
1652 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
1653 
1654   Level: beginner
1655 
1656 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
1657 
1658 M*/
1659 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Report MATSOLVERMUMPS as the solver package backing this factor matrix.
   Composed on factor matrices as "MatFactorGetSolverPackage_C" so that
   MatFactorGetSolverPackage() can dispatch here. */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
1668 
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/* Create an (empty) factor matrix B with the same type and layout as A, attach the
   MUMPS interface routines and query API, and initialize the MUMPS instance.
   The actual factorization happens later via B->ops->lufactorsymbolic or
   B->ops->choleskyfactorsymbolic followed by the numeric phase. */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  /* zero preallocation: B is only a shell holding the MUMPS data, it stores no aij entries itself */
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view    = MatView_MUMPS;
  B->ops->getinfo = MatGetInfo_MUMPS;

  /* expose the MUMPS option-setting and statistics-query API on the factor matrix */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0;   /* MUMPS SYM=0: unsymmetric factorization */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
    /* SYM=1: symmetric positive definite, SYM=2: general symmetric */
    if (A->spd_set && A->spd) mumps->sym = 1;
    else                      mumps->sym = 2;
  }

  /* chain destroy: capture the type's destroy BEFORE overwriting it, so
     MatDestroy_MUMPS can call it after releasing the MUMPS data */
  mumps->isAIJ    = PETSC_TRUE;
  mumps->Destroy  = B->ops->destroy;
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
1731 
1732 /* MatGetFactor for Seq and MPI SBAIJ matrices */
1733 #undef __FUNCT__
1734 #define __FUNCT__ "MatGetFactor_sbaij_mumps"
1735 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1736 {
1737   Mat            B;
1738   PetscErrorCode ierr;
1739   Mat_MUMPS      *mumps;
1740   PetscBool      isSeqSBAIJ;
1741 
1742   PetscFunctionBegin;
1743   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
1744   if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
1745   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
1746   /* Create the factorization matrix */
1747   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1748   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1749   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1750   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1751   if (isSeqSBAIJ) {
1752     ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
1753 
1754     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
1755   } else {
1756     ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
1757 
1758     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
1759   }
1760 
1761   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
1762   B->ops->view                   = MatView_MUMPS;
1763 
1764   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1765   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1766   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1767   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1768   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1769 
1770   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1771   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1772   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1773   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1774 
1775   B->factortype = MAT_FACTOR_CHOLESKY;
1776   if (A->spd_set && A->spd) mumps->sym = 1;
1777   else                      mumps->sym = 2;
1778 
1779   mumps->isAIJ    = PETSC_FALSE;
1780   mumps->Destroy  = B->ops->destroy;
1781   B->ops->destroy = MatDestroy_MUMPS;
1782   B->spptr        = (void*)mumps;
1783 
1784   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1785 
1786   *F = B;
1787   PetscFunctionReturn(0);
1788 }
1789 
1790 #undef __FUNCT__
1791 #define __FUNCT__ "MatGetFactor_baij_mumps"
1792 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
1793 {
1794   Mat            B;
1795   PetscErrorCode ierr;
1796   Mat_MUMPS      *mumps;
1797   PetscBool      isSeqBAIJ;
1798 
1799   PetscFunctionBegin;
1800   /* Create the factorization matrix */
1801   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
1802   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1803   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1804   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1805   if (isSeqBAIJ) {
1806     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
1807   } else {
1808     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
1809   }
1810 
1811   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1812   if (ftype == MAT_FACTOR_LU) {
1813     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
1814     B->factortype            = MAT_FACTOR_LU;
1815     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
1816     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
1817     mumps->sym = 0;
1818   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
1819 
1820   B->ops->view = MatView_MUMPS;
1821 
1822   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1823   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1824   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1825   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1826   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1827 
1828   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1829   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1830   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1831   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1832 
1833   mumps->isAIJ    = PETSC_TRUE;
1834   mumps->Destroy  = B->ops->destroy;
1835   B->ops->destroy = MatDestroy_MUMPS;
1836   B->spptr        = (void*)mumps;
1837 
1838   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1839 
1840   *F = B;
1841   PetscFunctionReturn(0);
1842 }
1843