xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision cf1aed2ce99d23e50336629af3ca8cf096900abb)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 
9 EXTERN_C_BEGIN
10 #if defined(PETSC_USE_COMPLEX)
11 #if defined(PETSC_USE_REAL_SINGLE)
12 #include <cmumps_c.h>
13 #else
14 #include <zmumps_c.h>
15 #endif
16 #else
17 #if defined(PETSC_USE_REAL_SINGLE)
18 #include <smumps_c.h>
19 #else
20 #include <dmumps_c.h>
21 #endif
22 #endif
23 EXTERN_C_END
24 #define JOB_INIT -1
25 #define JOB_FACTSYMBOLIC 1
26 #define JOB_FACTNUMERIC 2
27 #define JOB_SOLVE 3
28 #define JOB_END -2
29 
30 /* calls to MUMPS */
31 #if defined(PETSC_USE_COMPLEX)
32 #if defined(PETSC_USE_REAL_SINGLE)
33 #define PetscMUMPS_c cmumps_c
34 #else
35 #define PetscMUMPS_c zmumps_c
36 #endif
37 #else
38 #if defined(PETSC_USE_REAL_SINGLE)
39 #define PetscMUMPS_c smumps_c
40 #else
41 #define PetscMUMPS_c dmumps_c
42 #endif
43 #endif
44 
45 
46 /* macros s.t. indices match MUMPS documentation */
47 #define ICNTL(I) icntl[(I)-1]
48 #define CNTL(I) cntl[(I)-1]
49 #define INFOG(I) infog[(I)-1]
50 #define INFO(I) info[(I)-1]
51 #define RINFOG(I) rinfog[(I)-1]
52 #define RINFO(I) rinfo[(I)-1]
53 
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;               /* MUMPS control/data structure (complex, single precision) */
#else
  ZMUMPS_STRUC_C id;               /* MUMPS control/data structure (complex, double precision) */
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;               /* MUMPS control/data structure (real, single precision) */
#else
  DMUMPS_STRUC_C id;               /* MUMPS control/data structure (real, double precision) */
#endif
#endif

  MatStructure matstruc;           /* set to SAME_NONZERO_PATTERN after a numeric factorization */
  PetscMPIInt  myid,size;          /* rank and size in the MUMPS communicator */
  PetscInt     *irn,*jcn,nz,sym;   /* matrix triples: irn/jcn share one allocation (freed via PetscFree(irn)); nz = entry count; sym presumably the MUMPS symmetry flag -- not set in this chunk, confirm */
  PetscScalar  *val;               /* triple values; may alias the Mat's storage or live in irn's allocation */
  MPI_Comm     comm_mumps;         /* communicator handed to MUMPS, freed in MatDestroy_MUMPS */
  VecScatter   scat_rhs, scat_sol; /* scatters: global rhs -> centralized b_seq; MUMPS x_seq -> global x */
  PetscBool    isAIJ,CleanUpMUMPS; /* CleanUpMUMPS: a factorization exists and must be torn down at destroy */
  Vec          b_seq,x_seq;        /* sequential rhs on the host; wrapper around MUMPS's local solution */
  PetscInt     ICNTL9_pre;   /* check if ICNTL(9) is changed from previous MatSolve */

  PetscErrorCode (*Destroy)(Mat);  /* destroy routine of the underlying matrix type */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); /* Mat -> (irn,jcn,val) */
} Mat_MUMPS;
82 
83 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
84 
85 
86 /* MatConvertToTriples_A_B */
87 /*convert Petsc matrix to triples: row[nz], col[nz], val[nz] */
88 /*
89   input:
90     A       - matrix in aij,baij or sbaij (bs=1) format
91     shift   - 0: C style output triple; 1: Fortran style output triple.
92     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
93               MAT_REUSE_MATRIX:   only the values in v array are updated
94   output:
95     nnz     - dim of r, c, and v (number of local nonzero entries of A)
96     r, c, v - row and col index, matrix values (matrix triples)
97 
The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is
freed with PetscFree(mumps->irn);  This is not ideal code: the fact that v is ONLY sometimes part of mumps->irn means
that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
101 
102  */
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
106 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
107 {
108   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
109   PetscInt       nz,rnz,i,j;
110   PetscErrorCode ierr;
111   PetscInt       *row,*col;
112   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
113 
114   PetscFunctionBegin;
115   *v=aa->a;
116   if (reuse == MAT_INITIAL_MATRIX) {
117     nz   = aa->nz;
118     ai   = aa->i;
119     aj   = aa->j;
120     *nnz = nz;
121     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
122     col  = row + nz;
123 
124     nz = 0;
125     for (i=0; i<M; i++) {
126       rnz = ai[i+1] - ai[i];
127       ajj = aj + ai[i];
128       for (j=0; j<rnz; j++) {
129         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
130       }
131     }
132     *r = row; *c = col;
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
139 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
140 {
141   Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
142   const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
143   PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
144   PetscErrorCode ierr;
145   PetscInt       *row,*col;
146 
147   PetscFunctionBegin;
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   M = A->rmap->N/bs;
150   *v = aa->a;
151   if (reuse == MAT_INITIAL_MATRIX) {
152     ai   = aa->i; aj = aa->j;
153     nz   = bs2*aa->nz;
154     *nnz = nz;
155     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
156     col  = row + nz;
157 
158     for (i=0; i<M; i++) {
159       ajj = aj + ai[i];
160       rnz = ai[i+1] - ai[i];
161       for (k=0; k<rnz; k++) {
162         for (j=0; j<bs; j++) {
163           for (m=0; m<bs; m++) {
164             row[idx]   = i*bs + m + shift;
165             col[idx++] = bs*(ajj[k]) + j + shift;
166           }
167         }
168       }
169     }
170     *r = row; *c = col;
171   }
172   PetscFunctionReturn(0);
173 }
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
177 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
178 {
179   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
180   PetscInt       nz,rnz,i,j;
181   PetscErrorCode ierr;
182   PetscInt       *row,*col;
183   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
184 
185   PetscFunctionBegin;
186   *v = aa->a;
187   if (reuse == MAT_INITIAL_MATRIX) {
188     nz   = aa->nz;
189     ai   = aa->i;
190     aj   = aa->j;
191     *v   = aa->a;
192     *nnz = nz;
193     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
194     col  = row + nz;
195 
196     nz = 0;
197     for (i=0; i<M; i++) {
198       rnz = ai[i+1] - ai[i];
199       ajj = aj + ai[i];
200       for (j=0; j<rnz; j++) {
201         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
202       }
203     }
204     *r = row; *c = col;
205   }
206   PetscFunctionReturn(0);
207 }
208 
209 #undef __FUNCT__
210 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
211 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
212 {
213   const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
214   PetscInt          nz,rnz,i,j;
215   const PetscScalar *av,*v1;
216   PetscScalar       *val;
217   PetscErrorCode    ierr;
218   PetscInt          *row,*col;
219   Mat_SeqSBAIJ      *aa=(Mat_SeqSBAIJ*)A->data;
220 
221   PetscFunctionBegin;
222   ai   =aa->i; aj=aa->j;av=aa->a;
223   adiag=aa->diag;
224   if (reuse == MAT_INITIAL_MATRIX) {
225     nz   = M + (aa->nz-M)/2;
226     *nnz = nz;
227     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
228     col  = row + nz;
229     val  = (PetscScalar*)(col + nz);
230 
231     nz = 0;
232     for (i=0; i<M; i++) {
233       rnz = ai[i+1] - adiag[i];
234       ajj = aj + adiag[i];
235       v1  = av + adiag[i];
236       for (j=0; j<rnz; j++) {
237         row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
238       }
239     }
240     *r = row; *c = col; *v = val;
241   } else {
242     nz = 0; val = *v;
243     for (i=0; i <M; i++) {
244       rnz = ai[i+1] - adiag[i];
245       ajj = aj + adiag[i];
246       v1  = av + adiag[i];
247       for (j=0; j<rnz; j++) {
248         val[nz++] = v1[j];
249       }
250     }
251   }
252   PetscFunctionReturn(0);
253 }
254 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Build MUMPS triples for the local rows of an MPISBAIJ matrix (bs=1).
  The diagonal block (mat->A, SeqSBAIJ) stores only the upper triangle and is
  copied as-is with its columns offset by rstart; off-diagonal-block (mat->B,
  SeqBAIJ) columns are mapped to global numbering through garray.  Indices
  and values share one allocation (freed later via PetscFree(mumps->irn));
  on MAT_REUSE_MATRIX only the values are refilled.
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray;   /* local-to-global column map for the B block */

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;   /* irow tracks the global row index */
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: diagonal-block columns become global via rstart */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: off-diagonal-block columns become global via garray */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
318 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
319 {
320   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
321   PetscErrorCode    ierr;
322   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
323   PetscInt          *row,*col;
324   const PetscScalar *av, *bv,*v1,*v2;
325   PetscScalar       *val;
326   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
327   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
328   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
329 
330   PetscFunctionBegin;
331   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
332   av=aa->a; bv=bb->a;
333 
334   garray = mat->garray;
335 
336   if (reuse == MAT_INITIAL_MATRIX) {
337     nz   = aa->nz + bb->nz;
338     *nnz = nz;
339     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
340     col  = row + nz;
341     val  = (PetscScalar*)(col + nz);
342 
343     *r = row; *c = col; *v = val;
344   } else {
345     row = *r; col = *c; val = *v;
346   }
347 
348   jj = 0; irow = rstart;
349   for (i=0; i<m; i++) {
350     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
351     countA = ai[i+1] - ai[i];
352     countB = bi[i+1] - bi[i];
353     bjj    = bj + bi[i];
354     v1     = av + ai[i];
355     v2     = bv + bi[i];
356 
357     /* A-part */
358     for (j=0; j<countA; j++) {
359       if (reuse == MAT_INITIAL_MATRIX) {
360         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
361       }
362       val[jj++] = v1[j];
363     }
364 
365     /* B-part */
366     for (j=0; j < countB; j++) {
367       if (reuse == MAT_INITIAL_MATRIX) {
368         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
369       }
370       val[jj++] = v2[j];
371     }
372     irow++;
373   }
374   PetscFunctionReturn(0);
375 }
376 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/*
  Expand the local rows of an MPIBAIJ matrix (block size bs) into pointwise
  MUMPS triples.  Diagonal-block (A) block-columns become global via rstart;
  off-diagonal-block (B) block-columns are mapped through garray.  Block
  values are traversed with the point-row index n innermost, matching the
  storage order of each bs x bs block.  On MAT_REUSE_MATRIX only the values
  are refilled.
*/
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz);            /* bs^2 point entries per stored block */
    *nnz = nz;
    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;                     /* global point row of this block row's first row */
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];                 /* first value of block row i in A */
    v2     = bv + bs2*bi[i];                 /* first value of block row i in B */

    idx = 0;
    /* A-part: block column ajj[k] -> global point columns rstart + bs*ajj[k] + j */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {                 /* column inside the block */
        for (n=0; n<bs; n++) {               /* row inside the block    */
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part: block column bjj[k] -> global point columns bs*garray[bjj[k]] + j */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
447 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/*
  Build MUMPS triples for the upper triangle (including the diagonal) of the
  local rows of an MPIAIJ matrix, as needed by the symmetric MUMPS solvers.
  A-part rows are taken from the diagonal entry onward (aa->diag); B-part
  entries are kept only when their global column exceeds rstart -- B holds no
  columns inside this rank's diagonal block, so that test keeps exactly the
  columns to the right of it.  Values are copied into the shared allocation;
  on MAT_REUSE_MATRIX only the values are refilled.
*/
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    /* first pass: count the upper-triangular entries so one exact allocation suffices */
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;                     /* global row index */
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: diagonal entry and everything to its right */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: keep only columns right of the diagonal block (upper triangle) */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
522 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroy the MUMPS factor: release the PETSc-side work objects, terminate
  the MUMPS instance (JOB_END), free the duplicated communicator, invoke the
  type-specific destroy routine, and detach the composed query functions.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
    ierr = PetscFree(mumps->irn);CHKERRQ(ierr);  /* jcn (and sometimes val) share this allocation */

    mumps->id.job = JOB_END;                     /* let MUMPS release its internal data */
    PetscMUMPS_c(&mumps->id);
    ierr = MPI_Comm_free(&(mumps->comm_mumps));CHKERRQ(ierr);
  }
  if (mumps->Destroy) {
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);    /* destroy routine of the underlying matrix type */
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
563 
564 #undef __FUNCT__
565 #define __FUNCT__ "MatSolve_MUMPS"
566 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
567 {
568   Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
569   PetscScalar      *array;
570   Vec              b_seq;
571   IS               is_iden,is_petsc;
572   PetscErrorCode   ierr;
573   PetscInt         i;
574   static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
575 
576   PetscFunctionBegin;
577   ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
578   ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
579   mumps->id.nrhs = 1;
580   b_seq          = mumps->b_seq;
581   if (mumps->size > 1) {
582     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
583     ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
584     ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
585     if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
586   } else {  /* size == 1 */
587     ierr = VecCopy(b,x);CHKERRQ(ierr);
588     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
589   }
590   if (!mumps->myid) { /* define rhs on the host */
591     mumps->id.nrhs = 1;
592 #if defined(PETSC_USE_COMPLEX)
593 #if defined(PETSC_USE_REAL_SINGLE)
594     mumps->id.rhs = (mumps_complex*)array;
595 #else
596     mumps->id.rhs = (mumps_double_complex*)array;
597 #endif
598 #else
599     mumps->id.rhs = array;
600 #endif
601   }
602 
603   /* solve phase */
604   /*-------------*/
605   mumps->id.job = JOB_SOLVE;
606   PetscMUMPS_c(&mumps->id);
607   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
608 
609   if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
610     if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
611       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
612       ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
613     }
614     if (!mumps->scat_sol) { /* create scatter scat_sol */
615       ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
616       for (i=0; i<mumps->id.lsol_loc; i++) {
617         mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
618       }
619       ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
620       ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
621       ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
622       ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);
623 
624       mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
625     }
626 
627     ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
628     ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
629   }
630   PetscFunctionReturn(0);
631 }
632 
633 #undef __FUNCT__
634 #define __FUNCT__ "MatSolveTranspose_MUMPS"
635 PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
636 {
637   Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   mumps->id.ICNTL(9) = 0;
642 
643   ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
644 
645   mumps->id.ICNTL(9) = 1;
646   PetscFunctionReturn(0);
647 }
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "MatMatSolve_MUMPS"
651 PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
652 {
653   PetscErrorCode ierr;
654   PetscBool      flg;
655 
656   PetscFunctionBegin;
657   ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
658   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
659   ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
660   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
661   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatMatSolve_MUMPS() is not implemented yet");
662   PetscFunctionReturn(0);
663 }
664 
665 #if !defined(PETSC_USE_COMPLEX)
666 /*
667   input:
668    F:        numeric factor
669   output:
670    nneg:     total number of negative pivots
671    nzero:    0
672    npos:     (global dimension of F) - nneg
673 */
674 
675 #undef __FUNCT__
676 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
677 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
678 {
679   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
680   PetscErrorCode ierr;
681   PetscMPIInt    size;
682 
683   PetscFunctionBegin;
684   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
685   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
686   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
687 
688   if (nneg) *nneg = mumps->id.INFOG(12);
689   if (nzero || npos) {
690     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
691     if (nzero) *nzero = mumps->id.INFOG(28);
692     if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
693   }
694   PetscFunctionReturn(0);
695 }
696 #endif /* !defined(PETSC_USE_COMPLEX) */
697 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Numerical factorization: refresh the triple values from A, hand them to
  MUMPS (centralized 'a' on the host when ICNTL(18)=0, distributed 'a_loc'
  otherwise), run JOB_FACTNUMERIC, and on the first parallel factorization
  create x_seq wrapping MUMPS's local solution array for subsequent solves.
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* values only: the triple index arrays were built earlier (MAT_REUSE_MATRIX) */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) {   /* centralized matrix input: only the host supplies values */
    if (!mumps->myid) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a = (mumps_complex*)mumps->val;
#else
      mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a = mumps->val;
#endif
    }
  } else {                      /* distributed matrix input: every rank supplies its values */
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
    mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
    mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
    mumps->id.a_loc = mumps->val;
#endif
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    if (mumps->id.INFO(1) == -13) {   /* -13: workspace allocation failed; INFO(2) carries the size */
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): the condition tests ICNTL(16) but the message prints INFOG(16) -- verify which was intended */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  if (mumps->size > 1) {
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
    if (mumps->scat_sol) {
      /* the local-solution layout may differ after refactorization: discard and rebuild lazily below */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
  }
  (F)->assembled      = PETSC_TRUE;
  mumps->matstruc     = SAME_NONZERO_PATTERN;
  mumps->CleanUpMUMPS = PETSC_TRUE;

  if (mumps->size > 1) {
    /* distributed solution */
    if (!mumps->scat_sol) {
      /* Create x_seq=sol_loc for repeated use */
      PetscInt    lsol_loc;
      PetscScalar *sol_loc;

      lsol_loc = mumps->id.INFO(23); /* length of sol_loc */

      ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);

      mumps->id.lsol_loc = lsol_loc;
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.sol_loc = (mumps_complex*)sol_loc;
#else
      mumps->id.sol_loc = (mumps_double_complex*)sol_loc;
#endif
#else
      mumps->id.sol_loc = sol_loc;
#endif
      /* x_seq shares sol_loc's storage, so MUMPS writes the solution straight into it */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
789 
790 /* Sets MUMPS options from the options database */
791 #undef __FUNCT__
792 #define __FUNCT__ "PetscSetMUMPSFromOptions"
793 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
794 {
795   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
796   PetscErrorCode ierr;
797   PetscInt       icntl;
798   PetscBool      flg;
799 
800   PetscFunctionBegin;
801   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
802   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
803   if (flg) mumps->id.ICNTL(1) = icntl;
804   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
805   if (flg) mumps->id.ICNTL(2) = icntl;
806   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
807   if (flg) mumps->id.ICNTL(3) = icntl;
808 
809   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
810   if (flg) mumps->id.ICNTL(4) = icntl;
811   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
812 
813   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permuting and/or scaling the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
814   if (flg) mumps->id.ICNTL(6) = icntl;
815 
816   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
817   if (flg) {
818     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
819     else mumps->id.ICNTL(7) = icntl;
820   }
821 
822   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
823   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
824   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to the linear system solved (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
825   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control: defines the ordering strategy with scaling constraints (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
826   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control: with or without ScaLAPACK","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
827   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
828   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
829 
830   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core facility (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
831   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
832   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
833   if (mumps->id.ICNTL(24)) {
834     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
835   }
836 
837   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computation of a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
838   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): Schur options for right-hand side or solution vector","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
839   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): experimental parameter","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
840   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
841   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
842   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
843   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): factors can be discarded in the solve phase","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
844   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
845 
846   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
847   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
848   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
849   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
850   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
851 
852   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
853   PetscOptionsEnd();
854   PetscFunctionReturn(0);
855 }
856 
857 #undef __FUNCT__
858 #define __FUNCT__ "PetscInitializeMUMPS"
859 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
860 {
861   PetscErrorCode ierr;
862 
863   PetscFunctionBegin;
864   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
865   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
866   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
867 
868   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
869 
870   mumps->id.job = JOB_INIT;
871   mumps->id.par = 1;  /* host participates factorizaton and solve */
872   mumps->id.sym = mumps->sym;
873   PetscMUMPS_c(&mumps->id);
874 
875   mumps->CleanUpMUMPS = PETSC_FALSE;
876   mumps->scat_rhs     = NULL;
877   mumps->scat_sol     = NULL;
878 
879   /* set PETSc-MUMPS default options - override MUMPS default */
880   mumps->id.ICNTL(3) = 0;
881   mumps->id.ICNTL(4) = 0;
882   if (mumps->size == 1) {
883     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
884   } else {
885     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
886     mumps->id.ICNTL(21) = 1;   /* distributed solution */
887   }
888   PetscFunctionReturn(0);
889 }
890 
/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
   MatLUFactorSymbolic_AIJMUMPS - Runs the MUMPS analysis phase (JOB_FACTSYMBOLIC)
   for an AIJ matrix that is first converted to triple (row,col,value) format.

   Input Parameters:
+  F    - the factor matrix; F->spptr holds the Mat_MUMPS context
.  A    - the matrix to factor
.  r,c  - orderings; only r is used, and only when ICNTL(7)==1 with centralized input
-  info - factorization options (not consulted here)

   Notes:
   ICNTL(18), set in PetscInitializeMUMPS(), selects the input format:
   0 = centralized on the host (uniprocessor), 3 = distributed (parallel).
   A negative INFOG(1) from MUMPS is converted into a PETSc library error.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A into the COO triples (irn,jcn,val) that MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* numerical values are needed at analysis time only when ICNTL(6)>1
         (permutation/scaling strategies that inspect the values) */
      if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host holds the full right-hand side; other ranks contribute empty pieces */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = 0;  /* use MatMatSolve_Basic() until mumps supports distributed rhs */
  PetscFunctionReturn(0);
}
987 
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
   MatLUFactorSymbolic_BAIJMUMPS - MUMPS analysis phase (JOB_FACTSYMBOLIC) for
   BAIJ matrices.  Identical to the AIJ variant except that user orderings
   (ICNTL(7)==1) are not supported, so r and c are ignored.
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A into the COO triples (irn,jcn,val) that MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* numerical values are needed at analysis time only when ICNTL(6)>1 */
      if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host holds the full right-hand side; other ranks contribute empty pieces */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}
1064 
/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   MatCholeskyFactorSymbolic_MUMPS - MUMPS analysis phase (JOB_FACTSYMBOLIC)
   for symmetric factorizations.  The structure mirrors the LU variants; since
   the matrix is symmetric, the transpose solve reuses MatSolve_MUMPS.
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A into the COO triples (irn,jcn,val) that MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* numerical values are needed at analysis time only when ICNTL(6)>1 */
      if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
        mumps->id.a = (mumps_complex*)mumps->val;
#else
        mumps->id.a = (mumps_double_complex*)mumps->val;
#endif
#else
        mumps->id.a = mumps->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
      mumps->id.a_loc = (mumps_complex*)mumps->val;
#else
      mumps->id.a_loc = (mumps_double_complex*)mumps->val;
#endif
#else
      mumps->id.a_loc = mumps->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host holds the full right-hand side; other ranks contribute empty pieces */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS;
  F->ops->matsolve              = 0; /* use MatMatSolve_Basic() until mumps supports distributed rhs */
#if !defined(PETSC_USE_COMPLEX)
  /* inertia computation only makes sense for real symmetric factorizations */
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#else
  F->ops->getinertia = NULL;
#endif
  PetscFunctionReturn(0);
}
1147 
1148 #undef __FUNCT__
1149 #define __FUNCT__ "MatView_MUMPS"
1150 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1151 {
1152   PetscErrorCode    ierr;
1153   PetscBool         iascii;
1154   PetscViewerFormat format;
1155   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1156 
1157   PetscFunctionBegin;
1158   /* check if matrix is mumps type */
1159   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1160 
1161   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1162   if (iascii) {
1163     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1164     if (format == PETSC_VIEWER_ASCII_INFO) {
1165       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1166       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1167       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1168       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1169       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1170       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1171       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1172       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1173       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1174       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1175       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1176       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1177       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1178       if (mumps->id.ICNTL(11)>0) {
1179         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1180         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1181         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1182         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1183         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1184         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1185       }
1186       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1187       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1188       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1189       /* ICNTL(15-17) not used */
1190       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1191       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1192       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1193       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (somumpstion struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1194       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1195       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1196 
1197       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1198       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1199       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1200       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1201       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1202       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1203 
1204       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1205       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1206       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1207 
1208       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1209       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1210       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absomumpste pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1211       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (vamumpse of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1212       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1213 
1214       /* infomation local to each processor */
1215       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1216       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
1217       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1218       ierr = PetscViewerFlush(viewer);
1219       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1220       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1221       ierr = PetscViewerFlush(viewer);
1222       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1223       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1224       ierr = PetscViewerFlush(viewer);
1225 
1226       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1227       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1228       ierr = PetscViewerFlush(viewer);
1229 
1230       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1231       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1232       ierr = PetscViewerFlush(viewer);
1233 
1234       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1235       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1236       ierr = PetscViewerFlush(viewer);
1237       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
1238 
1239       if (!mumps->myid) { /* information from the host */
1240         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1241         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1242         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1243         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1244 
1245         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1246         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1247         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1248         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1249         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1250         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1251         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1252         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1253         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1254         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1255         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1256         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1257         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1258         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1259         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1260         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1261         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1262         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1263         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1264         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1265         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1266         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1267         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1268         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1269         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1270         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1271         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1272         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1273         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1274       }
1275     }
1276   }
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #undef __FUNCT__
1281 #define __FUNCT__ "MatGetInfo_MUMPS"
1282 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1283 {
1284   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1285 
1286   PetscFunctionBegin;
1287   info->block_size        = 1.0;
1288   info->nz_allocated      = mumps->id.INFOG(20);
1289   info->nz_used           = mumps->id.INFOG(20);
1290   info->nz_unneeded       = 0.0;
1291   info->assemblies        = 0.0;
1292   info->mallocs           = 0.0;
1293   info->memory            = 0.0;
1294   info->fill_ratio_given  = 0;
1295   info->fill_ratio_needed = 0;
1296   info->factor_mallocs    = 0;
1297   PetscFunctionReturn(0);
1298 }
1299 
1300 /* -------------------------------------------------------------------------------------------*/
1301 #undef __FUNCT__
1302 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
1303 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1304 {
1305   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1306 
1307   PetscFunctionBegin;
1308   mumps->id.ICNTL(icntl) = ival;
1309   PetscFunctionReturn(0);
1310 }
1311 
1312 #undef __FUNCT__
1313 #define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
1314 PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
1315 {
1316   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1317 
1318   PetscFunctionBegin;
1319   *ival = mumps->id.ICNTL(icntl);
1320   PetscFunctionReturn(0);
1321 }
1322 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetIcntl"
/*@
  MatMumpsSetIcntl - Set MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array ICNTL()
-  ival - value of MUMPS ICNTL(icntl)

  Options Database:
.   -mat_mumps_icntl_<icntl> <ival>

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveInt(F,ival,3);
  /* dispatches to MatMumpsSetIcntl_MUMPS; PetscTryMethod is a no-op when F
     does not provide "MatMumpsSetIcntl_C" (i.e. F is not a MUMPS factor) */
  ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1354 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetIcntl"
/*@
  MatMumpsGetIcntl - Get MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array ICNTL()

  Output Parameter:
.  ival - value of MUMPS ICNTL(icntl)

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidIntPointer(ival,3);
  /* dispatches to MatMumpsGetIcntl_MUMPS; PetscTryMethod is a no-op when F
     does not provide "MatMumpsGetIcntl_C" (i.e. F is not a MUMPS factor) */
  ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1385 
1386 /* -------------------------------------------------------------------------------------------*/
1387 #undef __FUNCT__
1388 #define __FUNCT__ "MatMumpsSetCntl_MUMPS"
1389 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
1390 {
1391   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1392 
1393   PetscFunctionBegin;
1394   mumps->id.CNTL(icntl) = val;
1395   PetscFunctionReturn(0);
1396 }
1397 
1398 #undef __FUNCT__
1399 #define __FUNCT__ "MatMumpsGetCntl_MUMPS"
1400 PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
1401 {
1402   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1403 
1404   PetscFunctionBegin;
1405   *val = mumps->id.CNTL(icntl);
1406   PetscFunctionReturn(0);
1407 }
1408 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetCntl"
/*@
  MatMumpsSetCntl - Set MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array CNTL()
-  val - value of MUMPS CNTL(icntl)

  Options Database:
.   -mat_mumps_cntl_<icntl> <val>

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* icntl and val must have the same value on every rank of F's communicator */
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveReal(F,val,3);
  /* PetscTryMethod(): dispatches to MatMumpsSetCntl_MUMPS(); silently a no-op if F is not a MUMPS factor */
  ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1440 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetCntl"
/*@
  MatMumpsGetCntl - Get MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array CNTL()

  Output Parameter:
.  val - value of MUMPS CNTL(icntl)

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* icntl must have the same value on every rank of F's communicator */
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidRealPointer(val,3);
  /* PetscTryMethod(): dispatches to MatMumpsGetCntl_MUMPS(); silently a no-op if F is not a MUMPS factor */
  ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1471 
1472 #undef __FUNCT__
1473 #define __FUNCT__ "MatMumpsGetInfo_MUMPS"
1474 PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
1475 {
1476   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1477 
1478   PetscFunctionBegin;
1479   *info = mumps->id.INFO(icntl);
1480   PetscFunctionReturn(0);
1481 }
1482 
1483 #undef __FUNCT__
1484 #define __FUNCT__ "MatMumpsGetInfog_MUMPS"
1485 PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
1486 {
1487   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1488 
1489   PetscFunctionBegin;
1490   *infog = mumps->id.INFOG(icntl);
1491   PetscFunctionReturn(0);
1492 }
1493 
1494 #undef __FUNCT__
1495 #define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
1496 PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
1497 {
1498   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1499 
1500   PetscFunctionBegin;
1501   *rinfo = mumps->id.RINFO(icntl);
1502   PetscFunctionReturn(0);
1503 }
1504 
1505 #undef __FUNCT__
1506 #define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
1507 PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
1508 {
1509   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1510 
1511   PetscFunctionBegin;
1512   *rinfog = mumps->id.RINFOG(icntl);
1513   PetscFunctionReturn(0);
1514 }
1515 
1516 #undef __FUNCT__
1517 #define __FUNCT__ "MatMumpsGetInfo"
1518 /*@
1519   MatMumpsGetInfo - Get MUMPS parameter INFO()
1520 
1521    Logically Collective on Mat
1522 
1523    Input Parameters:
1524 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1525 -  icntl - index of MUMPS parameter array INFO()
1526 
1527   Output Parameter:
1528 .  ival - value of MUMPS INFO(icntl)
1529 
1530    Level: beginner
1531 
1532    References: MUMPS Users' Guide
1533 
1534 .seealso: MatGetFactor()
1535 @*/
1536 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
1537 {
1538   PetscErrorCode ierr;
1539 
1540   PetscFunctionBegin;
1541   PetscValidIntPointer(ival,3);
1542   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1543   PetscFunctionReturn(0);
1544 }
1545 
1546 #undef __FUNCT__
1547 #define __FUNCT__ "MatMumpsGetInfog"
1548 /*@
1549   MatMumpsGetInfog - Get MUMPS parameter INFOG()
1550 
1551    Logically Collective on Mat
1552 
1553    Input Parameters:
1554 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1555 -  icntl - index of MUMPS parameter array INFOG()
1556 
1557   Output Parameter:
1558 .  ival - value of MUMPS INFOG(icntl)
1559 
1560    Level: beginner
1561 
1562    References: MUMPS Users' Guide
1563 
1564 .seealso: MatGetFactor()
1565 @*/
1566 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
1567 {
1568   PetscErrorCode ierr;
1569 
1570   PetscFunctionBegin;
1571   PetscValidIntPointer(ival,3);
1572   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1573   PetscFunctionReturn(0);
1574 }
1575 
1576 #undef __FUNCT__
1577 #define __FUNCT__ "MatMumpsGetRinfo"
1578 /*@
1579   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
1580 
1581    Logically Collective on Mat
1582 
1583    Input Parameters:
1584 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1585 -  icntl - index of MUMPS parameter array RINFO()
1586 
1587   Output Parameter:
1588 .  val - value of MUMPS RINFO(icntl)
1589 
1590    Level: beginner
1591 
1592    References: MUMPS Users' Guide
1593 
1594 .seealso: MatGetFactor()
1595 @*/
1596 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
1597 {
1598   PetscErrorCode ierr;
1599 
1600   PetscFunctionBegin;
1601   PetscValidRealPointer(val,3);
1602   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1603   PetscFunctionReturn(0);
1604 }
1605 
1606 #undef __FUNCT__
1607 #define __FUNCT__ "MatMumpsGetRinfog"
1608 /*@
1609   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
1610 
1611    Logically Collective on Mat
1612 
1613    Input Parameters:
1614 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1615 -  icntl - index of MUMPS parameter array RINFOG()
1616 
1617   Output Parameter:
1618 .  val - value of MUMPS RINFOG(icntl)
1619 
1620    Level: beginner
1621 
1622    References: MUMPS Users' Guide
1623 
1624 .seealso: MatGetFactor()
1625 @*/
1626 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
1627 {
1628   PetscErrorCode ierr;
1629 
1630   PetscFunctionBegin;
1631   PetscValidRealPointer(val,3);
1632   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1633   PetscFunctionReturn(0);
1634 }
1635 
1636 /*MC
1637   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
1638   distributed and sequential matrices via the external package MUMPS.
1639 
1640   Works with MATAIJ and MATSBAIJ matrices
1641 
1642   Options Database Keys:
1643 + -mat_mumps_icntl_4 <0,...,4> - print level
1644 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
1645 . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
1646 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
1647 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
1648 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
1649 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
1650 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
1651 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
1652 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
1653 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
1654 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
1655 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
1656 
1657   Level: beginner
1658 
1659 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
1660 
1661 M*/
1662 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Report MATSOLVERMUMPS as the solver package backing this factor matrix;
   composed on factors as "MatFactorGetSolverPackage_C" */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
1671 
1672 /* MatGetFactor for Seq and MPI AIJ matrices */
1673 #undef __FUNCT__
1674 #define __FUNCT__ "MatGetFactor_aij_mumps"
1675 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
1676 {
1677   Mat            B;
1678   PetscErrorCode ierr;
1679   Mat_MUMPS      *mumps;
1680   PetscBool      isSeqAIJ;
1681 
1682   PetscFunctionBegin;
1683   /* Create the factorization matrix */
1684   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
1685   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1686   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1687   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1688   if (isSeqAIJ) {
1689     ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
1690   } else {
1691     ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
1692   }
1693 
1694   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1695 
1696   B->ops->view    = MatView_MUMPS;
1697   B->ops->getinfo = MatGetInfo_MUMPS;
1698 
1699   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1700   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1701   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1702   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1703   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1704 
1705   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1706   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1707   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1708   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1709   if (ftype == MAT_FACTOR_LU) {
1710     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
1711     B->factortype            = MAT_FACTOR_LU;
1712     if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
1713     else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
1714     mumps->sym = 0;
1715   } else {
1716     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
1717     B->factortype                  = MAT_FACTOR_CHOLESKY;
1718     if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
1719     else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
1720     if (A->spd_set && A->spd) mumps->sym = 1;
1721     else                      mumps->sym = 2;
1722   }
1723 
1724   mumps->isAIJ    = PETSC_TRUE;
1725   mumps->Destroy  = B->ops->destroy;
1726   B->ops->destroy = MatDestroy_MUMPS;
1727   B->spptr        = (void*)mumps;
1728 
1729   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1730 
1731   *F = B;
1732   PetscFunctionReturn(0);
1733 }
1734 
1735 /* MatGetFactor for Seq and MPI SBAIJ matrices */
1736 #undef __FUNCT__
1737 #define __FUNCT__ "MatGetFactor_sbaij_mumps"
1738 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1739 {
1740   Mat            B;
1741   PetscErrorCode ierr;
1742   Mat_MUMPS      *mumps;
1743   PetscBool      isSeqSBAIJ;
1744 
1745   PetscFunctionBegin;
1746   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
1747   if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
1748   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
1749   /* Create the factorization matrix */
1750   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1751   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1752   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1753   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1754   if (isSeqSBAIJ) {
1755     ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
1756 
1757     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
1758   } else {
1759     ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
1760 
1761     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
1762   }
1763 
1764   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
1765   B->ops->view                   = MatView_MUMPS;
1766 
1767   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1768   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1769   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1770   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1771   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1772 
1773   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1774   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1775   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1776   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1777 
1778   B->factortype = MAT_FACTOR_CHOLESKY;
1779   if (A->spd_set && A->spd) mumps->sym = 1;
1780   else                      mumps->sym = 2;
1781 
1782   mumps->isAIJ    = PETSC_FALSE;
1783   mumps->Destroy  = B->ops->destroy;
1784   B->ops->destroy = MatDestroy_MUMPS;
1785   B->spptr        = (void*)mumps;
1786 
1787   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1788 
1789   *F = B;
1790   PetscFunctionReturn(0);
1791 }
1792 
1793 #undef __FUNCT__
1794 #define __FUNCT__ "MatGetFactor_baij_mumps"
1795 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
1796 {
1797   Mat            B;
1798   PetscErrorCode ierr;
1799   Mat_MUMPS      *mumps;
1800   PetscBool      isSeqBAIJ;
1801 
1802   PetscFunctionBegin;
1803   /* Create the factorization matrix */
1804   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
1805   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1806   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1807   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1808   if (isSeqBAIJ) {
1809     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
1810   } else {
1811     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
1812   }
1813 
1814   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
1815   if (ftype == MAT_FACTOR_LU) {
1816     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
1817     B->factortype            = MAT_FACTOR_LU;
1818     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
1819     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
1820     mumps->sym = 0;
1821   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
1822 
1823   B->ops->view = MatView_MUMPS;
1824 
1825   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1826   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
1827   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
1828   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
1829   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
1830 
1831   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
1832   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
1833   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
1834   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
1835 
1836   mumps->isAIJ    = PETSC_TRUE;
1837   mumps->Destroy  = B->ops->destroy;
1838   B->ops->destroy = MatDestroy_MUMPS;
1839   B->spptr        = (void*)mumps;
1840 
1841   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1842 
1843   *F = B;
1844   PetscFunctionReturn(0);
1845 }
1846