xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision d5574798c8464fdb92aede3bf97f4457bb71bbdc)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 
9 EXTERN_C_BEGIN
10 #if defined(PETSC_USE_COMPLEX)
11 #if defined(PETSC_USE_REAL_SINGLE)
12 #include <cmumps_c.h>
13 #else
14 #include <zmumps_c.h>
15 #endif
16 #else
17 #if defined(PETSC_USE_REAL_SINGLE)
18 #include <smumps_c.h>
19 #else
20 #include <dmumps_c.h>
21 #endif
22 #endif
23 EXTERN_C_END
24 #define JOB_INIT -1
25 #define JOB_FACTSYMBOLIC 1
26 #define JOB_FACTNUMERIC 2
27 #define JOB_SOLVE 3
28 #define JOB_END -2
29 
30 /* calls to MUMPS */
31 #if defined(PETSC_USE_COMPLEX)
32 #if defined(PETSC_USE_REAL_SINGLE)
33 #define PetscMUMPS_c cmumps_c
34 #else
35 #define PetscMUMPS_c zmumps_c
36 #endif
37 #else
38 #if defined(PETSC_USE_REAL_SINGLE)
39 #define PetscMUMPS_c smumps_c
40 #else
41 #define PetscMUMPS_c dmumps_c
42 #endif
43 #endif
44 
45 /* declare MumpsScalar */
46 #if defined(PETSC_USE_COMPLEX)
47 #if defined(PETSC_USE_REAL_SINGLE)
48 #define MumpsScalar mumps_complex
49 #else
50 #define MumpsScalar mumps_double_complex
51 #endif
52 #else
53 #define MumpsScalar PetscScalar
54 #endif
55 
56 /* macros s.t. indices match MUMPS documentation */
57 #define ICNTL(I) icntl[(I)-1]
58 #define CNTL(I) cntl[(I)-1]
59 #define INFOG(I) infog[(I)-1]
60 #define INFO(I) info[(I)-1]
61 #define RINFOG(I) rinfog[(I)-1]
62 #define RINFO(I) rinfo[(I)-1]
63 
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;               /* MUMPS instance: single-precision complex */
#else
  ZMUMPS_STRUC_C id;               /* MUMPS instance: double-precision complex */
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;               /* MUMPS instance: single-precision real */
#else
  DMUMPS_STRUC_C id;               /* MUMPS instance: double-precision real */
#endif
#endif

  MatStructure matstruc;           /* set to SAME_NONZERO_PATTERN after a numeric factorization */
  PetscMPIInt  myid,size;          /* rank and size on comm_mumps */
  PetscInt     *irn,*jcn,nz,sym;   /* COO triples: row indices, column indices, local nonzero count; sym: symmetry flag (usage not shown in this chunk) */
  PetscScalar  *val;               /* triple values; may alias the PETSc matrix storage or live inside the irn allocation */
  MPI_Comm     comm_mumps;         /* private communicator used for MUMPS calls; freed in MatDestroy_MUMPS() */
  PetscBool    isAIJ,CleanUpMUMPS; /* CleanUpMUMPS: a MUMPS instance exists and must be terminated (JOB_END) at destroy time */
  PetscInt     ICNTL9_pre;         /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol; /* used by MatSolve() */
  Vec          b_seq,x_seq;        /* sequential rhs/solution work vectors (MUMPS centralizes the rhs on the host) */
  PetscInt     ninfo,*info;        /* display INFO */

  PetscErrorCode (*Destroy)(Mat);  /* type-specific destroy chained from MatDestroy_MUMPS() */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); /* converts the PETSc matrix to COO triples for MUMPS */
} Mat_MUMPS;
93 
94 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
95 
96 /*
97   MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz]
98 
99   input:
100     A       - matrix in aij,baij or sbaij (bs=1) format
101     shift   - 0: C style output triple; 1: Fortran style output triple.
102     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
103               MAT_REUSE_MATRIX:   only the values in v array are updated
104   output:
105     nnz     - dim of r, c, and v (number of local nonzero entries of A)
106     r, c, v - row and col index, matrix values (matrix triples)
107 
108   The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is
  freed with PetscFree(mumps->irn);  This is not ideal code: the fact that v is ONLY sometimes part of mumps->irn means
110   that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
111 
112  */
113 
114 #undef __FUNCT__
115 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
116 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
117 {
118   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
119   PetscInt       nz,rnz,i,j;
120   PetscErrorCode ierr;
121   PetscInt       *row,*col;
122   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
123 
124   PetscFunctionBegin;
125   *v=aa->a;
126   if (reuse == MAT_INITIAL_MATRIX) {
127     nz   = aa->nz;
128     ai   = aa->i;
129     aj   = aa->j;
130     *nnz = nz;
131     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
132     col  = row + nz;
133 
134     nz = 0;
135     for (i=0; i<M; i++) {
136       rnz = ai[i+1] - ai[i];
137       ajj = aj + ai[i];
138       for (j=0; j<rnz; j++) {
139         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
140       }
141     }
142     *r = row; *c = col;
143   }
144   PetscFunctionReturn(0);
145 }
146 
147 #undef __FUNCT__
148 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
149 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
150 {
151   Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
152   const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
153   PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
154   PetscErrorCode ierr;
155   PetscInt       *row,*col;
156 
157   PetscFunctionBegin;
158   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
159   M = A->rmap->N/bs;
160   *v = aa->a;
161   if (reuse == MAT_INITIAL_MATRIX) {
162     ai   = aa->i; aj = aa->j;
163     nz   = bs2*aa->nz;
164     *nnz = nz;
165     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
166     col  = row + nz;
167 
168     for (i=0; i<M; i++) {
169       ajj = aj + ai[i];
170       rnz = ai[i+1] - ai[i];
171       for (k=0; k<rnz; k++) {
172         for (j=0; j<bs; j++) {
173           for (m=0; m<bs; m++) {
174             row[idx]   = i*bs + m + shift;
175             col[idx++] = bs*(ajj[k]) + j + shift;
176           }
177         }
178       }
179     }
180     *r = row; *c = col;
181   }
182   PetscFunctionReturn(0);
183 }
184 
185 #undef __FUNCT__
186 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
187 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
188 {
189   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
190   PetscInt       nz,rnz,i,j;
191   PetscErrorCode ierr;
192   PetscInt       *row,*col;
193   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
194 
195   PetscFunctionBegin;
196   *v = aa->a;
197   if (reuse == MAT_INITIAL_MATRIX) {
198     nz   = aa->nz;
199     ai   = aa->i;
200     aj   = aa->j;
201     *v   = aa->a;
202     *nnz = nz;
203     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
204     col  = row + nz;
205 
206     nz = 0;
207     for (i=0; i<M; i++) {
208       rnz = ai[i+1] - ai[i];
209       ajj = aj + ai[i];
210       for (j=0; j<rnz; j++) {
211         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
212       }
213     }
214     *r = row; *c = col;
215   }
216   PetscFunctionReturn(0);
217 }
218 
219 #undef __FUNCT__
220 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
221 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
222 {
223   const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
224   PetscInt          nz,rnz,i,j;
225   const PetscScalar *av,*v1;
226   PetscScalar       *val;
227   PetscErrorCode    ierr;
228   PetscInt          *row,*col;
229   Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;
230 
231   PetscFunctionBegin;
232   ai   =aa->i; aj=aa->j;av=aa->a;
233   adiag=aa->diag;
234   if (reuse == MAT_INITIAL_MATRIX) {
235     /* count nz in the uppper triangular part of A */
236     nz = 0;
237     for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
238     *nnz = nz;
239 
240     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
241     col  = row + nz;
242     val  = (PetscScalar*)(col + nz);
243 
244     nz = 0;
245     for (i=0; i<M; i++) {
246       rnz = ai[i+1] - adiag[i];
247       ajj = aj + adiag[i];
248       v1  = av + adiag[i];
249       for (j=0; j<rnz; j++) {
250         row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
251       }
252     }
253     *r = row; *c = col; *v = val;
254   } else {
255     nz = 0; val = *v;
256     for (i=0; i <M; i++) {
257       rnz = ai[i+1] - adiag[i];
258       ajj = aj + adiag[i];
259       v1  = av + adiag[i];
260       for (j=0; j<rnz; j++) {
261         val[nz++] = v1[j];
262       }
263     }
264   }
265   PetscFunctionReturn(0);
266 }
267 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Convert the locally owned rows of an MPISBAIJ matrix (bs=1) into COO triples
  with global indices (offset by shift for Fortran-style MUMPS indexing).
  On MAT_INITIAL_MATRIX a single allocation holds [row | col | val] and is
  returned through r/c/v (only *r is ever freed); on MAT_REUSE_MATRIX the
  index arrays are kept and only val is refilled, in the identical order.
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray; /* maps compressed off-diagonal (B) columns to global columns */

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* one allocation: [row | col | val]; callers free only *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow: global index of the current local row */
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part (diagonal block): local columns become global by adding rstart */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part (off-diagonal block): columns mapped to global via garray */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
328 
329 #undef __FUNCT__
330 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
331 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
332 {
333   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
334   PetscErrorCode    ierr;
335   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
336   PetscInt          *row,*col;
337   const PetscScalar *av, *bv,*v1,*v2;
338   PetscScalar       *val;
339   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
340   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
341   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
342 
343   PetscFunctionBegin;
344   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
345   av=aa->a; bv=bb->a;
346 
347   garray = mat->garray;
348 
349   if (reuse == MAT_INITIAL_MATRIX) {
350     nz   = aa->nz + bb->nz;
351     *nnz = nz;
352     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
353     col  = row + nz;
354     val  = (PetscScalar*)(col + nz);
355 
356     *r = row; *c = col; *v = val;
357   } else {
358     row = *r; col = *c; val = *v;
359   }
360 
361   jj = 0; irow = rstart;
362   for (i=0; i<m; i++) {
363     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
364     countA = ai[i+1] - ai[i];
365     countB = bi[i+1] - bi[i];
366     bjj    = bj + bi[i];
367     v1     = av + ai[i];
368     v2     = bv + bi[i];
369 
370     /* A-part */
371     for (j=0; j<countA; j++) {
372       if (reuse == MAT_INITIAL_MATRIX) {
373         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
374       }
375       val[jj++] = v1[j];
376     }
377 
378     /* B-part */
379     for (j=0; j < countB; j++) {
380       if (reuse == MAT_INITIAL_MATRIX) {
381         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
382       }
383       val[jj++] = v2[j];
384     }
385     irow++;
386   }
387   PetscFunctionReturn(0);
388 }
389 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/*
  Convert the locally owned block rows of an MPIBAIJ matrix into scalar COO
  triples for MUMPS, expanding every bs x bs block.  Indices are global and
  offset by shift.  On MAT_INITIAL_MATRIX one allocation holds [row | col | val]
  and is returned through r/c/v; on MAT_REUSE_MATRIX only val is refilled, in
  the identical traversal order.
*/
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz); /* scalar nonzeros: bs^2 per stored block */
    *nnz = nz;
    /* one allocation: [row | col | val]; callers free only *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow: global index of the first scalar row in this block row */
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i]; /* block values stored contiguously, bs2 scalars per block */
    v2     = bv + bs2*bi[i];

    idx = 0;
    /* A-part (diagonal block): expand blocks; inner n loop walks rows within a block column j */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part (off-diagonal block): garray maps block columns to global block columns */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
460 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/*
  Convert the locally owned rows of an MPIAIJ matrix into COO triples holding
  only the upper triangular part (diagonal included), for a symmetric MUMPS
  factorization.  From the diagonal block only entries at or right of the
  diagonal are taken (via aa->diag); from the off-diagonal block only columns
  with global index > rstart are kept — off-diagonal columns are either
  < rstart or >= rstart+m, so this selects exactly the columns to the right
  of the diagonal block.  On MAT_INITIAL_MATRIX one allocation holds
  [row | col | val]; on MAT_REUSE_MATRIX only val is refilled, in the same
  order (including the same column filter).
*/
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        if (garray[bjj[j]] > rstart) nzb++; /* keep only columns right of the diagonal block */
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* one allocation: [row | col | val]; callers free only *r */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart; /* irow: global index of the current local row */
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: from the diagonal entry to the end of the row */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: same filter as in the counting pass above */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
535 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_MUMPS"
/* MatGetDiagonal is not supported on a MUMPS factor matrix; always raises PETSC_ERR_SUP. */
PetscErrorCode MatGetDiagonal_MUMPS(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: MUMPS factor");
  PetscFunctionReturn(0); /* unreachable; satisfies the non-void return */
}
544 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroy a MUMPS factor matrix: release the PETSc-side work objects, then
  terminate the MUMPS instance (JOB_END frees MUMPS's internal data), free
  the private communicator, chain to the type-specific destroy, and clear
  all composed query/set functions.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->CleanUpMUMPS) { /* only if a MUMPS instance was actually created */
    /* Terminate instance, deallocate memories */
    ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
    /* irn heads the single allocation that also holds jcn (and sometimes val) */
    ierr = PetscFree(mumps->irn);CHKERRQ(ierr);
    ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
    ierr = PetscFree(mumps->info);CHKERRQ(ierr);

    mumps->id.job = JOB_END;
    PetscMUMPS_c(&mumps->id); /* MUMPS frees its own internal storage */
    ierr = MPI_Comm_free(&(mumps->comm_mumps));CHKERRQ(ierr);
  }
  if (mumps->Destroy) {
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetSchurIndices_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetSchurComplement_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "MatSolve_MUMPS"
593 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
594 {
595   Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
596   PetscScalar      *array;
597   Vec              b_seq;
598   IS               is_iden,is_petsc;
599   PetscErrorCode   ierr;
600   PetscInt         i;
601   static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
602 
603   PetscFunctionBegin;
604   ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
605   ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
606   mumps->id.nrhs = 1;
607   b_seq          = mumps->b_seq;
608   if (mumps->size > 1) {
609     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
610     ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
611     ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
612     if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
613   } else {  /* size == 1 */
614     ierr = VecCopy(b,x);CHKERRQ(ierr);
615     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
616   }
617   if (!mumps->myid) { /* define rhs on the host */
618     mumps->id.nrhs = 1;
619     mumps->id.rhs = (MumpsScalar*)array;
620   }
621 
622   /* solve phase */
623   /*-------------*/
624   mumps->id.job = JOB_SOLVE;
625   PetscMUMPS_c(&mumps->id);
626   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
627 
628   if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
629     if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
630       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
631       ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
632     }
633     if (!mumps->scat_sol) { /* create scatter scat_sol */
634       ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
635       for (i=0; i<mumps->id.lsol_loc; i++) {
636         mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
637       }
638       ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
639       ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
640       ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
641       ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);
642 
643       mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
644     }
645 
646     ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
647     ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
648   }
649   PetscFunctionReturn(0);
650 }
651 
#undef __FUNCT__
#define __FUNCT__ "MatSolveTranspose_MUMPS"
/*
  Solve A^T x = b by toggling MUMPS ICNTL(9): values other than 1 select the
  transposed system, 1 the normal one.
  NOTE(review): ICNTL(9) is reset to 1 afterwards rather than to whatever it
  was before — assumes the forward-solve setting (1, the MUMPS default) was
  in effect; confirm against MatSetFromOptions handling of icntl_9.
*/
PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0; /* request solve with A^T */
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1; /* restore forward-solve mode */
  PetscFunctionReturn(0);
}
665 
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_MUMPS"
/*
  Solve A X = B for dense B and X with the MUMPS factor in A->spptr.
  Serial: B is copied into X and MUMPS solves in place on X's array.
  Parallel: B's columns are gathered into one sequential rhs on the host
  (MUMPS only supports a centralized rhs), MUMPS returns a distributed
  solution (sol_loc/isol_loc), and a scatter maps it back into X.
*/
PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M;     /* leading dimension of the centralized rhs */

  if (mumps->size == 1) {
    /* copy B to X, then solve in place on X's (column-major dense) array */
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    for (i=0; i<M*nrhs; i++) array[i] = bray[i];
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array;

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
  } else {  /*--------- parallel case --------*/
    PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
    MumpsScalar    *sol_loc,*sol_loc_save;
    IS             is_to,is_from;
    PetscInt       k,proc,j,m;
    const PetscInt *rstart;
    Vec            v_mpi,b_seq,x_seq;
    VecScatter     scat_rhs,scat_sol;

    /* create x_seq to hold local solution */
    isol_loc_save = mumps->id.isol_loc; /* save it for MatSovle() */
    sol_loc_save  = mumps->id.sol_loc;

    lsol_loc  = mumps->id.INFO(23);     /* INFO(23): size of the local solution MUMPS will return */
    nlsol_loc = nrhs*lsol_loc;     /* length of sol_loc */
    ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr);
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    mumps->id.isol_loc = isol_loc;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr);

    /* copy rhs matrix B into vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
    /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
      iidx: inverse of idx, will be used by scattering xx_seq -> X       */
    ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
    k = 0;
    for (proc=0; proc<mumps->size; proc++){
      for (j=0; j<nrhs; j++){
        for (i=rstart[proc]; i<rstart[proc+1]; i++){
          iidx[j*M + i] = k;       /* global (i,j) entry -> position k in v_mpi */
          idx[k++]      = j*M + i; /* position k in v_mpi -> column-major global index */
        }
      }
    }

    if (!mumps->myid) {
      /* only the host receives the full rhs */
      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      /* other ranks contribute zero-length pieces */
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

    /* create scatter scat_sol */
    ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
    for (i=0; i<lsol_loc; i++) {
      isol_loc[i] -= 1; /* change Fortran style to C style */
      idxx[i] = iidx[isol_loc[i]];
      for (j=1; j<nrhs; j++){
        idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M]; /* same row, subsequent rhs columns */
      }
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
    ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

    /* free spaces */
    mumps->id.sol_loc = sol_loc_save;   /* restore the buffers used by MatSolve() */
    mumps->id.isol_loc = isol_loc_save;

    ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
    ierr = PetscFree2(idx,iidx);CHKERRQ(ierr);
    ierr = PetscFree(idxx);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
807 
808 #if !defined(PETSC_USE_COMPLEX)
809 /*
810   input:
811    F:        numeric factor
812   output:
813    nneg:     total number of negative pivots
814    nzero:    0
815    npos:     (global dimension of F) - nneg
816 */
817 
818 #undef __FUNCT__
819 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
820 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
821 {
822   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
823   PetscErrorCode ierr;
824   PetscMPIInt    size;
825 
826   PetscFunctionBegin;
827   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
828   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
829   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
830 
831   if (nneg) *nneg = mumps->id.INFOG(12);
832   if (nzero || npos) {
833     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
834     if (nzero) *nzero = mumps->id.INFOG(28);
835     if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
836   }
837   PetscFunctionReturn(0);
838 }
839 #endif /* !defined(PETSC_USE_COMPLEX) */
840 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Performs the MUMPS numerical factorization phase (JOB_FACTNUMERIC) on F.
  The entries of A are re-extracted into the (irn,jcn,val) triples that were
  allocated during the symbolic phase (MAT_REUSE_MATRIX), so the nonzero
  pattern of A must not have changed since the symbolic factorization.
  In parallel (size>1) it also (re)creates the sequential vector x_seq that
  aliases MUMPS's distributed solution array sol_loc for use in MatSolve.
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* refresh numerical values in the triples; the sparsity pattern is reused */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* A is centralized */
    /* only the host (rank 0) supplies the matrix values */
    if (!mumps->myid) {
      mumps->id.a = (MumpsScalar*)mumps->val;
    }
  } else {
    /* distributed input: every rank supplies its local values */
    mumps->id.a_loc = (MumpsScalar*)mumps->val;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    /* INFO(1)=-13: workspace allocation failed; INFO(2) carries the size
       (negative value => size is expressed in megabytes, else in bytes) */
    if (mumps->id.INFO(1) == -13) {
      if (mumps->id.INFO(2) < 0) {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      } else {
        SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
      }
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): the condition tests ICNTL(16) but the message prints INFOG(16)
     — confirm against the MUMPS manual which field is actually intended here */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  (F)->assembled      = PETSC_TRUE;
  mumps->matstruc     = SAME_NONZERO_PATTERN;
  mumps->CleanUpMUMPS = PETSC_TRUE;

  if (mumps->size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    /* mark the diagonal block of the (empty) parallel factor as assembled */
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      /* refactorization: release the previous solution workspace first */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    /* x_seq wraps sol_loc without copying; MUMPS writes the local solution into it */
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
902 
903 /* Sets MUMPS options from the options database */
904 #undef __FUNCT__
905 #define __FUNCT__ "PetscSetMUMPSFromOptions"
906 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
907 {
908   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
909   PetscErrorCode ierr;
910   PetscInt       icntl,info[40],i,ninfo=40;
911   PetscBool      flg;
912 
913   PetscFunctionBegin;
914   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
915   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
916   if (flg) mumps->id.ICNTL(1) = icntl;
917   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
918   if (flg) mumps->id.ICNTL(2) = icntl;
919   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
920   if (flg) mumps->id.ICNTL(3) = icntl;
921 
922   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
923   if (flg) mumps->id.ICNTL(4) = icntl;
924   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
925 
926   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
927   if (flg) mumps->id.ICNTL(6) = icntl;
928 
929   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
930   if (flg) {
931     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
932     else mumps->id.ICNTL(7) = icntl;
933   }
934 
935   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
936   /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
937   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
938   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
939   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
940   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
941   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
942   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
943   /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */
944   /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */
945 
946   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
947   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
948   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
949   if (mumps->id.ICNTL(24)) {
950     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
951   }
952 
953   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
954   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
955   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
956   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
957   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
958   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
959   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
960   /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
961   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
962 
963   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
964   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
965   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
966   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
967   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
968 
969   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
970 
971   ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
972   if (ninfo) {
973     if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must <= 40\n",ninfo);
974     ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
975     mumps->ninfo = ninfo;
976     for (i=0; i<ninfo; i++) {
977       if (info[i] < 0 || info[i]>40) {
978         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must between 1 and 40\n",ninfo);
979       } else {
980         mumps->info[i] = info[i];
981       }
982     }
983   }
984 
985   PetscOptionsEnd();
986   PetscFunctionReturn(0);
987 }
988 
989 #undef __FUNCT__
990 #define __FUNCT__ "PetscInitializeMUMPS"
991 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
992 {
993   PetscErrorCode ierr;
994 
995   PetscFunctionBegin;
996   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
997   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
998   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
999 
1000   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
1001 
1002   mumps->id.job = JOB_INIT;
1003   mumps->id.par = 1;  /* host participates factorizaton and solve */
1004   mumps->id.sym = mumps->sym;
1005   PetscMUMPS_c(&mumps->id);
1006 
1007   mumps->CleanUpMUMPS = PETSC_FALSE;
1008   mumps->scat_rhs     = NULL;
1009   mumps->scat_sol     = NULL;
1010 
1011   /* set PETSc-MUMPS default options - override MUMPS default */
1012   mumps->id.ICNTL(3) = 0;
1013   mumps->id.ICNTL(4) = 0;
1014   if (mumps->size == 1) {
1015     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
1016   } else {
1017     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
1018     mumps->id.ICNTL(20) = 0;   /* rhs is in dense format */
1019     mumps->id.ICNTL(21) = 1;   /* distributed solution */
1020   }
1021 
1022   /* schur */
1023   mumps->id.size_schur = 0;
1024   mumps->id.listvar_schur = NULL;
1025   mumps->id.schur = NULL;
1026   PetscFunctionReturn(0);
1027 }
1028 
/* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
  MUMPS symbolic LU factorization (analysis phase, JOB_FACTSYMBOLIC) for AIJ
  matrices: converts A to triples, hands them to MUMPS according to the input
  mode ICNTL(18), runs the analysis, and installs the numeric/solve ops on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* first conversion: allocates the (irn,jcn,val) triple arrays */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* ICNTL(6)>1: MUMPS needs numerical values already at analysis time for scaling/permutation */
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          /* NOTE(review): perm_in is allocated here but no matching PetscFree is
             visible in this chunk -- verify it is released at factor destroy */
          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host gathers the full rhs of length N; other ranks contribute nothing */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    /* b is only a template vector for building the scatter; destroyed right after */
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = MatMatSolve_MUMPS;
  PetscFunctionReturn(0);
}
1109 
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
  MUMPS symbolic LU factorization (analysis phase, JOB_FACTSYMBOLIC) for BAIJ
  matrices. Same flow as the AIJ variant but without the user-ordering branch,
  and without installing a matsolve op.
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* first conversion: allocates the (irn,jcn,val) triple arrays */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* ICNTL(6)>1: MUMPS needs numerical values already at analysis time */
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host gathers the full rhs; other ranks contribute nothing */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    /* b is only a template vector for building the scatter; destroyed right after */
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  PetscFunctionReturn(0);
}
1170 
/* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
  MUMPS symbolic Cholesky factorization (analysis phase, JOB_FACTSYMBOLIC)
  for symmetric matrices. Same flow as the LU variants; additionally installs
  the inertia query (real builds only) and, being symmetric, reuses
  MatSolve_MUMPS for solvetranspose.
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* first conversion: allocates the (irn,jcn,val) triple arrays */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      /* ICNTL(6)>1: MUMPS needs numerical values already at analysis time */
      if (mumps->id.ICNTL(6)>1) {
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      /* host gathers the full rhs; other ranks contribute nothing */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    /* b is only a template vector for building the scatter; destroyed right after */
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS;
  F->ops->matsolve              = MatMatSolve_MUMPS;
#if defined(PETSC_USE_COMPLEX)
  /* inertia is only defined for real symmetric factorizations */
  F->ops->getinertia = NULL;
#else
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}
1237 
1238 #undef __FUNCT__
1239 #define __FUNCT__ "MatView_MUMPS"
1240 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1241 {
1242   PetscErrorCode    ierr;
1243   PetscBool         iascii;
1244   PetscViewerFormat format;
1245   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1246 
1247   PetscFunctionBegin;
1248   /* check if matrix is mumps type */
1249   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1250 
1251   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1252   if (iascii) {
1253     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1254     if (format == PETSC_VIEWER_ASCII_INFO) {
1255       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1256       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1257       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1258       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1259       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1260       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1261       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1262       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1263       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1264       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1265       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1266       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1267       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1268       if (mumps->id.ICNTL(11)>0) {
1269         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1270         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1271         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1272         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1273         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1274         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1275       }
1276       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1277       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1278       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1279       /* ICNTL(15-17) not used */
1280       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1281       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1282       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1283       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1284       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1285       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1286 
1287       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1288       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1289       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1290       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1291       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1292       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1293 
1294       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1295       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1296       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1297 
1298       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1299       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1300       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1301       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1302       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1303 
1304       /* infomation local to each processor */
1305       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1306       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
1307       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1308       ierr = PetscViewerFlush(viewer);
1309       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1310       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1311       ierr = PetscViewerFlush(viewer);
1312       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1313       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1314       ierr = PetscViewerFlush(viewer);
1315 
1316       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1317       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1318       ierr = PetscViewerFlush(viewer);
1319 
1320       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1321       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1322       ierr = PetscViewerFlush(viewer);
1323 
1324       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1325       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1326       ierr = PetscViewerFlush(viewer);
1327 
1328       if (mumps->ninfo && mumps->ninfo <= 40){
1329         PetscInt i;
1330         for (i=0; i<mumps->ninfo; i++){
1331           ierr = PetscViewerASCIIPrintf(viewer, "  INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr);
1332           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr);
1333           ierr = PetscViewerFlush(viewer);
1334         }
1335       }
1336 
1337 
1338       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
1339 
1340       if (!mumps->myid) { /* information from the host */
1341         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1342         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1343         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1344         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1345 
1346         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1347         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1348         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1349         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1350         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1351         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1352         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1353         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1354         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1355         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1356         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1357         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1358         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1359         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1360         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1361         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1362         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1363         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1364         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1365         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1366         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1367         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1368         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1369         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1370         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1371         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1372         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1373         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1374         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1375       }
1376     }
1377   }
1378   PetscFunctionReturn(0);
1379 }
1380 
1381 #undef __FUNCT__
1382 #define __FUNCT__ "MatGetInfo_MUMPS"
1383 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1384 {
1385   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1386 
1387   PetscFunctionBegin;
1388   info->block_size        = 1.0;
1389   info->nz_allocated      = mumps->id.INFOG(20);
1390   info->nz_used           = mumps->id.INFOG(20);
1391   info->nz_unneeded       = 0.0;
1392   info->assemblies        = 0.0;
1393   info->mallocs           = 0.0;
1394   info->memory            = 0.0;
1395   info->fill_ratio_given  = 0;
1396   info->fill_ratio_needed = 0;
1397   info->factor_mallocs    = 0;
1398   PetscFunctionReturn(0);
1399 }
1400 
1401 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetSchurIndices_MUMPS"
/* Plugin implementation behind MatMumpsSetSchurIndices(): records the list of
   Schur complement indices in the MUMPS structure and allocates the dense
   buffer MUMPS fills with the Schur complement during factorization.
   Only sequential factorizations (one MPI rank) are supported. */
PetscErrorCode MatMumpsSetSchurIndices_MUMPS(Mat F,PetscInt size,PetscInt idxs[])
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS parallel computation of Schur complements not yet supported from PETSc\n");
  /* (re)allocate only when the requested size changed; on the first call both
     pointers are NULL (context is zero-initialized) and PetscFree2 is a no-op */
  if (mumps->id.size_schur != size) {
    ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
    mumps->id.size_schur = size;
    mumps->id.schur_lld = size;  /* leading dimension of the size x size Schur buffer */
    ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr);
  }
  /* NOTE(review): the raw copy assumes MUMPS_INT and PetscInt have identical size;
     verify for 64-bit-index PETSc builds */
  ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr);
  /* select how MUMPS returns the Schur block (ICNTL(19)); for symmetric
     (Cholesky) factors only the lower triangular part is requested */
  if (F->factortype == MAT_FACTOR_LU) {
    mumps->id.ICNTL(19) = 3; /* return full matrix */
  } else {
    mumps->id.ICNTL(19) = 2; /* return lower triangular part */
  }
  PetscFunctionReturn(0);
}
1425 
1426 #undef __FUNCT__
1427 #define __FUNCT__ "MatMumpsSetSchurIndices"
1428 /*@
1429   MatMumpsSetSchurIndices - Set indices defining the Schur complement that MUMPS will compute during the factorization steps
1430 
1431    Logically Collective on Mat
1432 
1433    Input Parameters:
1434 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1435 .  size - size of the Schur complement indices
1436 -  idxs[] - array of Schur complement indices
1437 
1438    Notes:
1439    The user has to free the array idxs[] since it is copied by the routine.
1440    Currently implemented for sequential matrices
1441 
1442    Level: advanced
1443 
1444    References: MUMPS Users' Guide
1445 
1446 .seealso: MatGetFactor()
1447 @*/
1448 PetscErrorCode MatMumpsSetSchurIndices(Mat F,PetscInt size,PetscInt idxs[])
1449 {
1450   PetscErrorCode ierr;
1451 
1452   PetscFunctionBegin;
1453   PetscValidIntPointer(idxs,3);
1454   ierr = PetscTryMethod(F,"MatMumpsSetSchurIndices_C",(Mat,PetscInt,PetscInt[]),(F,size,idxs));CHKERRQ(ierr);
1455   PetscFunctionReturn(0);
1456 }
1457 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetSchurComplement_MUMPS"
/* Plugin implementation behind MatMumpsGetSchurComplement(): copies the Schur
   complement computed by MUMPS during numerical factorization into a newly
   created MATDENSE matrix (column-major storage).  MUMPS returns the Schur
   block in one of several layouts, selected by ICNTL(19) and the symmetry
   setting (mumps->sym); each branch below converts its layout into a full
   column-major dense matrix. */
PetscErrorCode MatMumpsGetSchurComplement_MUMPS(Mat F,Mat* S)
{
  Mat            St;
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscScalar    *array;
#if defined(PETSC_USE_COMPLEX)
  PetscScalar    im = PetscSqrtScalar(-1.0);  /* imaginary unit: rebuilds a PetscScalar from the MUMPS (r,i) pair */
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the Schur buffer is only valid after a numerical factorization with Schur indices set */
  if (mumps->id.job != JOB_FACTNUMERIC) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONG,"Numerical factorization phase not yet performed! You should call MatFactorSymbolic/Numeric before");
  } else if (mumps->id.size_schur == 0) {
    SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONG,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  }

  /* create the size_schur x size_schur dense matrix that receives the copy */
  ierr = MatCreate(PetscObjectComm((PetscObject)F),&St);CHKERRQ(ierr);
  ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr);
  ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr);
  ierr = MatSetUp(St);CHKERRQ(ierr);
  ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr);
  if (mumps->sym == 0) { /* MUMPS always return a full matrix */
    if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        for (j=0;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[j*N+i] = val;  /* transpose row-major MUMPS data into column-major St */
        }
      }
    } else { /* stored by columns */
      /* layouts match (column-major, lld == N): straight copy.
         NOTE(review): assumes MumpsScalar and PetscScalar share the same memory layout */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* either full or lower-triangular (not packed) */
    if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        /* fill column i (rows j >= i) ... */
        for (j=i;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[i*N+j] = val;
        }
        /* ... and mirror it into row i so St comes out full (symmetric) */
        for (j=i;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[j*N+i] = val;
        }
      }
    } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        /* copy row i (columns j <= i) and its mirror image, producing a full symmetric St */
        for (j=0;j<i+1;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[i*N+j] = val;
        }
        for (j=0;j<i+1;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[j*N+i] = val;
        }
      }
    }
  }
  ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr);
  *S = St;  /* ownership of St transfers to the caller */
  PetscFunctionReturn(0);
}
1547 
1548 #undef __FUNCT__
1549 #define __FUNCT__ "MatMumpsGetSchurComplement"
1550 /*@
1551   MatMumpsGetSchurComplement - Get Schur complement matrix computed by MUMPS during the factorization step
1552 
1553    Logically Collective on Mat
1554 
1555    Input Parameters:
1556 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1557 .  *S - location where to return the Schur complement (MATDENSE)
1558 
1559    Notes:
1560    Currently implemented for sequential matrices
1561 
1562    Level: advanced
1563 
1564    References: MUMPS Users' Guide
1565 
1566 .seealso: MatGetFactor()
1567 @*/
1568 PetscErrorCode MatMumpsGetSchurComplement(Mat F,Mat* S)
1569 {
1570   PetscErrorCode ierr;
1571 
1572   PetscFunctionBegin;
1573   ierr = PetscTryMethod(F,"MatMumpsGetSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
1574   PetscFunctionReturn(0);
1575 }
1576 
1577 /* -------------------------------------------------------------------------------------------*/
1578 #undef __FUNCT__
1579 #define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
1580 PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1581 {
1582   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1583 
1584   PetscFunctionBegin;
1585   mumps->id.ICNTL(icntl) = ival;
1586   PetscFunctionReturn(0);
1587 }
1588 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
/* Plugin implementation behind MatMumpsGetIcntl(): reads the current value of
   the MUMPS control entry ICNTL(icntl).  No bounds check is performed; callers
   must pass a 1-based index in the valid ICNTL range. */
PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *ival = mumps->id.ICNTL(icntl);  /* ICNTL(I) expands to icntl[I-1] (MUMPS is 1-based) */
  PetscFunctionReturn(0);
}
1599 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetIcntl"
/*@
  MatMumpsSetIcntl - Set MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array ICNTL()
-  ival - value of MUMPS ICNTL(icntl)

  Options Database:
.   -mat_mumps_icntl_<icntl> <ival>

   Notes:
   See the MUMPS Users' Guide for the meaning of each ICNTL entry.

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveInt(F,ival,3);
  /* dispatches to MatMumpsSetIcntl_MUMPS() when F comes from the MUMPS interface */
  ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1631 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetIcntl"
/*@
  MatMumpsGetIcntl - Get MUMPS parameter ICNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array ICNTL()

  Output Parameter:
.  ival - value of MUMPS ICNTL(icntl)

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetIcntl()
@*/
PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidIntPointer(ival,3);
  /* dispatches to MatMumpsGetIcntl_MUMPS() when F comes from the MUMPS interface */
  ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1662 
1663 /* -------------------------------------------------------------------------------------------*/
1664 #undef __FUNCT__
1665 #define __FUNCT__ "MatMumpsSetCntl_MUMPS"
1666 PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
1667 {
1668   Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1669 
1670   PetscFunctionBegin;
1671   mumps->id.CNTL(icntl) = val;
1672   PetscFunctionReturn(0);
1673 }
1674 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetCntl_MUMPS"
/* Plugin implementation behind MatMumpsGetCntl(): reads the current value of
   the MUMPS real control entry CNTL(icntl).  No bounds check is performed;
   callers must pass a 1-based index in the valid CNTL range. */
PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *val = mumps->id.CNTL(icntl);  /* CNTL(I) expands to cntl[I-1] (MUMPS is 1-based) */
  PetscFunctionReturn(0);
}
1685 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetCntl"
/*@
  MatMumpsSetCntl - Set MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  icntl - index of MUMPS parameter array CNTL()
-  val - value of MUMPS CNTL(icntl)

  Options Database:
.   -mat_mumps_cntl_<icntl> <val>

   Notes:
   See the MUMPS Users' Guide for the meaning of each CNTL entry.

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor()
@*/
PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidLogicalCollectiveReal(F,val,3);
  /* dispatches to MatMumpsSetCntl_MUMPS() when F comes from the MUMPS interface */
  ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1717 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetCntl"
/*@
  MatMumpsGetCntl - Get MUMPS parameter CNTL()

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
-  icntl - index of MUMPS parameter array CNTL()

  Output Parameter:
.  val - value of MUMPS CNTL(icntl)

   Level: beginner

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetCntl()
@*/
PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidLogicalCollectiveInt(F,icntl,2);
  PetscValidRealPointer(val,3);
  /* dispatches to MatMumpsGetCntl_MUMPS() when F comes from the MUMPS interface */
  ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1748 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetInfo_MUMPS"
/* Plugin implementation behind MatMumpsGetInfo(): reads the per-process MUMPS
   statistic INFO(icntl).  No bounds check is performed; callers must pass a
   1-based index in the valid INFO range. */
PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *info = mumps->id.INFO(icntl);  /* INFO(I) expands to info[I-1] (MUMPS is 1-based) */
  PetscFunctionReturn(0);
}
1759 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetInfog_MUMPS"
/* Plugin implementation behind MatMumpsGetInfog(): reads the global (host)
   MUMPS statistic INFOG(icntl).  No bounds check is performed; callers must
   pass a 1-based index in the valid INFOG range. */
PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *infog = mumps->id.INFOG(icntl);  /* INFOG(I) expands to infog[I-1] (MUMPS is 1-based) */
  PetscFunctionReturn(0);
}
1770 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
/* Plugin implementation behind MatMumpsGetRinfo(): reads the per-process real
   MUMPS statistic RINFO(icntl).  No bounds check is performed; callers must
   pass a 1-based index valid for the installed MUMPS version. */
PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *rinfo = mumps->id.RINFO(icntl);  /* RINFO(I) expands to rinfo[I-1] (MUMPS is 1-based) */
  PetscFunctionReturn(0);
}
1781 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
/* Plugin implementation behind MatMumpsGetRinfog(): reads the global (host)
   real MUMPS statistic RINFOG(icntl).  No bounds check is performed; callers
   must pass a 1-based index valid for the installed MUMPS version. */
PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *rinfog = mumps->id.RINFOG(icntl);  /* RINFOG(I) expands to rinfog[I-1] (MUMPS is 1-based) */
  PetscFunctionReturn(0);
}
1792 
1793 #undef __FUNCT__
1794 #define __FUNCT__ "MatMumpsGetInfo"
1795 /*@
1796   MatMumpsGetInfo - Get MUMPS parameter INFO()
1797 
1798    Logically Collective on Mat
1799 
1800    Input Parameters:
1801 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1802 -  icntl - index of MUMPS parameter array INFO()
1803 
1804   Output Parameter:
1805 .  ival - value of MUMPS INFO(icntl)
1806 
1807    Level: beginner
1808 
1809    References: MUMPS Users' Guide
1810 
1811 .seealso: MatGetFactor()
1812 @*/
1813 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
1814 {
1815   PetscErrorCode ierr;
1816 
1817   PetscFunctionBegin;
1818   PetscValidIntPointer(ival,3);
1819   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1820   PetscFunctionReturn(0);
1821 }
1822 
1823 #undef __FUNCT__
1824 #define __FUNCT__ "MatMumpsGetInfog"
1825 /*@
1826   MatMumpsGetInfog - Get MUMPS parameter INFOG()
1827 
1828    Logically Collective on Mat
1829 
1830    Input Parameters:
1831 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1832 -  icntl - index of MUMPS parameter array INFOG()
1833 
1834   Output Parameter:
1835 .  ival - value of MUMPS INFOG(icntl)
1836 
1837    Level: beginner
1838 
1839    References: MUMPS Users' Guide
1840 
1841 .seealso: MatGetFactor()
1842 @*/
1843 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
1844 {
1845   PetscErrorCode ierr;
1846 
1847   PetscFunctionBegin;
1848   PetscValidIntPointer(ival,3);
1849   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
1850   PetscFunctionReturn(0);
1851 }
1852 
1853 #undef __FUNCT__
1854 #define __FUNCT__ "MatMumpsGetRinfo"
1855 /*@
1856   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
1857 
1858    Logically Collective on Mat
1859 
1860    Input Parameters:
1861 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1862 -  icntl - index of MUMPS parameter array RINFO()
1863 
1864   Output Parameter:
1865 .  val - value of MUMPS RINFO(icntl)
1866 
1867    Level: beginner
1868 
1869    References: MUMPS Users' Guide
1870 
1871 .seealso: MatGetFactor()
1872 @*/
1873 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
1874 {
1875   PetscErrorCode ierr;
1876 
1877   PetscFunctionBegin;
1878   PetscValidRealPointer(val,3);
1879   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1880   PetscFunctionReturn(0);
1881 }
1882 
1883 #undef __FUNCT__
1884 #define __FUNCT__ "MatMumpsGetRinfog"
1885 /*@
1886   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
1887 
1888    Logically Collective on Mat
1889 
1890    Input Parameters:
1891 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1892 -  icntl - index of MUMPS parameter array RINFOG()
1893 
1894   Output Parameter:
1895 .  val - value of MUMPS RINFOG(icntl)
1896 
1897    Level: beginner
1898 
1899    References: MUMPS Users' Guide
1900 
1901 .seealso: MatGetFactor()
1902 @*/
1903 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
1904 {
1905   PetscErrorCode ierr;
1906 
1907   PetscFunctionBegin;
1908   PetscValidRealPointer(val,3);
1909   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
1910   PetscFunctionReturn(0);
1911 }
1912 
1913 /*MC
1914   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
1915   distributed and sequential matrices via the external package MUMPS.
1916 
1917   Works with MATAIJ and MATSBAIJ matrices
1918 
1919   Options Database Keys:
1920 +  -mat_mumps_icntl_1 <6>: ICNTL(1): output stream for error messages (None)
1921 .  -mat_mumps_icntl_2 <0>: ICNTL(2): output stream for diagnostic printing, statistics, and warning (None)
1922 .  -mat_mumps_icntl_3 <0>: ICNTL(3): output stream for global information, collected on the host (None)
1923 .  -mat_mumps_icntl_4 <0>: ICNTL(4): level of printing (0 to 4) (None)
1924 .  -mat_mumps_icntl_6 <7>: ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) (None)
1925 .  -mat_mumps_icntl_7 <7>: ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis (None)
1926 .  -mat_mumps_icntl_8 <77>: ICNTL(8): scaling strategy (-2 to 8 or 77) (None)
1927 .  -mat_mumps_icntl_10 <0>: ICNTL(10): max num of refinements (None)
1928 .  -mat_mumps_icntl_11 <0>: ICNTL(11): statistics related to an error analysis (via -ksp_view) (None)
1929 .  -mat_mumps_icntl_12 <1>: ICNTL(12): an ordering strategy for symmetric matrices (0 to 3) (None)
1930 .  -mat_mumps_icntl_13 <0>: ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting (None)
1931 .  -mat_mumps_icntl_14 <20>: ICNTL(14): percentage increase in the estimated working space (None)
1932 .  -mat_mumps_icntl_19 <0>: ICNTL(19): computes the Schur complement (None)
1933 .  -mat_mumps_icntl_22 <0>: ICNTL(22): in-core/out-of-core factorization and solve (0 or 1) (None)
1934 .  -mat_mumps_icntl_23 <0>: ICNTL(23): max size of the working memory (MB) that can allocate per processor (None)
1935 .  -mat_mumps_icntl_24 <0>: ICNTL(24): detection of null pivot rows (0 or 1) (None)
1936 .  -mat_mumps_icntl_25 <0>: ICNTL(25): compute a solution of a deficient matrix and a null space basis (None)
1937 .  -mat_mumps_icntl_26 <0>: ICNTL(26): drives the solution phase if a Schur complement matrix (None)
.  -mat_mumps_icntl_28 <1>: ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering (None)
1939 .  -mat_mumps_icntl_29 <0>: ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis (None)
1940 .  -mat_mumps_icntl_30 <0>: ICNTL(30): compute user-specified set of entries in inv(A) (None)
1941 .  -mat_mumps_icntl_31 <0>: ICNTL(31): indicates which factors may be discarded during factorization (None)
1942 .  -mat_mumps_icntl_33 <0>: ICNTL(33): compute determinant (None)
1943 .  -mat_mumps_cntl_1 <0.01>: CNTL(1): relative pivoting threshold (None)
1944 .  -mat_mumps_cntl_2 <1.49012e-08>: CNTL(2): stopping criterion of refinement (None)
1945 .  -mat_mumps_cntl_3 <0>: CNTL(3): absolute pivoting threshold (None)
1946 .  -mat_mumps_cntl_4 <-1>: CNTL(4): value for static pivoting (None)
1947 -  -mat_mumps_cntl_5 <0>: CNTL(5): fixation for null pivots (None)
1948 
1949   Level: beginner
1950 
1951 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
1952 
1953 M*/
1954 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Reports the solver-package name (MATSOLVERMUMPS) for factor matrices created
   by this interface; composed on each factor as "MatFactorGetSolverPackage_C". */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
1963 
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/* Creates the factor matrix B for a MUMPS LU or Cholesky factorization of the
   AIJ matrix A.  B is an empty matrix of the same type/sizes as A carrying the
   MUMPS context in B->spptr; the factorization itself is performed later via
   B->ops->lufactorsymbolic (LU) or B->ops->choleskyfactorsymbolic (Cholesky). */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  /* zero preallocation: B only carries the MUMPS context, not matrix entries */
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view        = MatView_MUMPS;
  B->ops->getinfo     = MatGetInfo_MUMPS;
  B->ops->getdiagonal = MatGetDiagonal_MUMPS;

  /* expose the MatMumpsXXX() API on the factor matrix via composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0;  /* MUMPS SYM=0: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
    if (A->spd_set && A->spd) mumps->sym = 1;  /* SYM=1: symmetric positive definite */
    else                      mumps->sym = 2;  /* SYM=2: general symmetric */
  }

  mumps->isAIJ    = PETSC_TRUE;
  mumps->Destroy  = B->ops->destroy;  /* keep the original destroy so MatDestroy_MUMPS can chain to it */
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
2030 
/* MatGetFactor for Seq and MPI SBAIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_sbaij_mumps"
/* Creates the factor matrix B for a MUMPS Cholesky factorization of the SBAIJ
   matrix A.  Only Cholesky with block size 1 is supported for SBAIJ input; the
   factorization itself is performed later via B->ops->choleskyfactorsymbolic. */
PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqSBAIJ;

  PetscFunctionBegin;
  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
  if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
  /* zero preallocation: B only carries the MUMPS context, not matrix entries */
  if (isSeqSBAIJ) {
    ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);

    mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
  } else {
    ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);

    mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
  }

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
  B->ops->view                   = MatView_MUMPS;
  B->ops->getdiagonal            = MatGetDiagonal_MUMPS;

  /* expose the MatMumpsXXX() API on the factor matrix via composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);

  B->factortype = MAT_FACTOR_CHOLESKY;
  if (A->spd_set && A->spd) mumps->sym = 1;  /* MUMPS SYM=1: symmetric positive definite */
  else                      mumps->sym = 2;  /* SYM=2: general symmetric */

  mumps->isAIJ    = PETSC_FALSE;
  mumps->Destroy  = B->ops->destroy;  /* keep the original destroy so MatDestroy_MUMPS can chain to it */
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
2092 
2093 #undef __FUNCT__
2094 #define __FUNCT__ "MatGetFactor_baij_mumps"
2095 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2096 {
2097   Mat            B;
2098   PetscErrorCode ierr;
2099   Mat_MUMPS      *mumps;
2100   PetscBool      isSeqBAIJ;
2101 
2102   PetscFunctionBegin;
2103   /* Create the factorization matrix */
2104   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
2105   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2106   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2107   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
2108   if (isSeqBAIJ) {
2109     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
2110   } else {
2111     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
2112   }
2113 
2114   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2115   if (ftype == MAT_FACTOR_LU) {
2116     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2117     B->factortype            = MAT_FACTOR_LU;
2118     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2119     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2120     mumps->sym = 0;
2121   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
2122 
2123   B->ops->view        = MatView_MUMPS;
2124   B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2125 
2126   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
2127   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2128   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2129   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2130   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2131 
2132   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2133   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2134   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2135   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2136 
2137   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
2138   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
2139 
2140   mumps->isAIJ    = PETSC_TRUE;
2141   mumps->Destroy  = B->ops->destroy;
2142   B->ops->destroy = MatDestroy_MUMPS;
2143   B->spptr        = (void*)mumps;
2144 
2145   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2146 
2147   *F = B;
2148   PetscFunctionReturn(0);
2149 }
2150 
2151 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*);
2152 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
2153 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat,MatFactorType,Mat*);
2154 
2155 #undef __FUNCT__
2156 #define __FUNCT__ "MatSolverPackageRegister_MUMPS"
2157 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void)
2158 {
2159   PetscErrorCode ierr;
2160 
2161   PetscFunctionBegin;
2162   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,         MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2163   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,         MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2164   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,         MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2165   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,         MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2166   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,         MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2167   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,         MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2168   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,         MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2169   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,         MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2170   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,         MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2171   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,         MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2172   PetscFunctionReturn(0);
2173 }
2174 
2175