xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision 7842634f46ad2ee292fa135afd07ef040c80768f)
1 #define PETSCMAT_DLL
2 
3 /*
4     Provides an interface to the MUMPS sparse solver
5 */
6 #include "../src/mat/impls/aij/seq/aij.h"  /*I  "petscmat.h"  I*/
7 #include "../src/mat/impls/aij/mpi/mpiaij.h"
8 #include "../src/mat/impls/sbaij/seq/sbaij.h"
9 #include "../src/mat/impls/sbaij/mpi/mpisbaij.h"
10 #include "../src/mat/impls/baij/seq/baij.h"
11 #include "../src/mat/impls/baij/mpi/mpibaij.h"
12 
13 EXTERN_C_BEGIN
14 #if defined(PETSC_USE_COMPLEX)
15 #include "zmumps_c.h"
16 #else
17 #include "dmumps_c.h"
18 #endif
19 EXTERN_C_END
20 #define JOB_INIT -1
21 #define JOB_END -2
22 /* macros s.t. indices match MUMPS documentation */
23 #define ICNTL(I) icntl[(I)-1]
24 #define CNTL(I) cntl[(I)-1]
25 #define INFOG(I) infog[(I)-1]
26 #define INFO(I) info[(I)-1]
27 #define RINFOG(I) rinfog[(I)-1]
28 #define RINFO(I) rinfo[(I)-1]
29 
typedef struct {
#if defined(PETSC_USE_COMPLEX)
  ZMUMPS_STRUC_C id;                /* MUMPS instance (complex-valued build) */
#else
  DMUMPS_STRUC_C id;                /* MUMPS instance (real-valued build) */
#endif
  MatStructure   matstruc;          /* nonzero-pattern state between factorizations */
  PetscMPIInt    myid,size;         /* rank and size of the matrix communicator */
  PetscInt       *irn,*jcn,nz,sym,nSolve;  /* triple row/col indices, local nnz, symmetry flag, solves since last factorization */
  PetscScalar    *val;              /* triple values */
  MPI_Comm       comm_mumps;        /* duplicated communicator handed to MUMPS */
  VecScatter     scat_rhs, scat_sol;  /* gather rhs to host / spread distributed solution back */
  PetscTruth     isAIJ,CleanUpMUMPS,mumpsview;  /* CleanUpMUMPS: instance was initialized and must be terminated on destroy */
  Vec            b_seq,x_seq;       /* sequential rhs on host; local piece of the distributed solution */
  PetscErrorCode (*MatDestroy)(Mat);  /* destroy routine of the wrapped matrix type, chained from MatDestroy_MUMPS */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);  /* format-specific triple builder */
} Mat_MUMPS;
47 
48 EXTERN PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
49 
50 
51 /* MatConvertToTriples_A_B */
52 /*convert Petsc matrix to triples: row[nz], col[nz], val[nz] */
53 /*
54   input:
55     A       - matrix in aij,baij or sbaij (bs=1) format
56     shift   - 0: C style output triple; 1: Fortran style output triple.
57     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
58               MAT_REUSE_MATRIX:   only the values in v array are updated
59   output:
60     nnz     - dim of r, c, and v (number of local nonzero entries of A)
61     r, c, v - row and col index, matrix values (matrix triples)
62  */
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
66 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
67 {
68   const PetscInt   *ai,*aj,*ajj,M=A->rmap->n;
69   PetscInt         nz,rnz,i,j;
70   PetscErrorCode   ierr;
71   PetscInt         *row,*col;
72   Mat_SeqAIJ       *aa=(Mat_SeqAIJ*)A->data;
73 
74   PetscFunctionBegin;
75   *v=aa->a;
76   if (reuse == MAT_INITIAL_MATRIX){
77     nz = aa->nz; ai = aa->i; aj = aa->j;
78     *nnz = nz;
79     ierr = PetscMalloc(2*nz*sizeof(PetscInt), &row);CHKERRQ(ierr);
80     col  = row + nz;
81 
82     nz = 0;
83     for(i=0; i<M; i++) {
84       rnz = ai[i+1] - ai[i];
85       ajj = aj + ai[i];
86       for(j=0; j<rnz; j++) {
87 	row[nz] = i+shift; col[nz++] = ajj[j] + shift;
88       }
89     }
90     *r = row; *c = col;
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
97 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
98 {
99   Mat_SeqBAIJ        *aa=(Mat_SeqBAIJ*)A->data;
100   const PetscInt     *ai,*aj,*ajj,bs=A->rmap->bs,bs2=aa->bs2,M=A->rmap->N/bs;
101   PetscInt           nz,idx=0,rnz,i,j,k,m,ii;
102   PetscErrorCode     ierr;
103   PetscInt           *row,*col;
104 
105   PetscFunctionBegin;
106   *v = aa->a;
107   if (reuse == MAT_INITIAL_MATRIX){
108     ai = aa->i; aj = aa->j;
109     nz = bs2*aa->nz;
110     *nnz = nz;
111     ierr = PetscMalloc(2*nz*sizeof(PetscInt), &row);CHKERRQ(ierr);
112     col  = row + nz;
113 
114     for(i=0; i<M; i++) {
115       ii = 0;
116       ajj = aj + ai[i];
117       rnz = ai[i+1] - ai[i];
118       for(k=0; k<rnz; k++) {
119 	for(j=0; j<bs; j++) {
120 	  for(m=0; m<bs; m++) {
121 	    row[idx]     = i*bs + m + shift;
122 	    col[idx++]   = bs*(ajj[k]) + j + shift;
123 	  }
124 	}
125       }
126     }
127     *r = row; *c = col;
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 #undef __FUNCT__
133 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
134 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
135 {
136   const PetscInt   *ai, *aj,*ajj,M=A->rmap->n;
137   PetscInt         nz,rnz,i,j;
138   PetscErrorCode   ierr;
139   PetscInt         *row,*col;
140   Mat_SeqSBAIJ     *aa=(Mat_SeqSBAIJ*)A->data;
141 
142   PetscFunctionBegin;
143   if (reuse == MAT_INITIAL_MATRIX){
144     nz = aa->nz;ai=aa->i; aj=aa->j;*v=aa->a;
145     *nnz = nz;
146     ierr = PetscMalloc(2*nz*sizeof(PetscInt), &row);CHKERRQ(ierr);
147     col  = row + nz;
148 
149     nz = 0;
150     for(i=0; i<M; i++) {
151       rnz = ai[i+1] - ai[i];
152       ajj = aj + ai[i];
153       for(j=0; j<rnz; j++) {
154 	row[nz] = i+shift; col[nz++] = ajj[j] + shift;
155       }
156     }
157     *r = row; *c = col;
158   }
159   PetscFunctionReturn(0);
160 }
161 
162 #undef __FUNCT__
163 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
164 PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
165 {
166   const PetscInt     *ai,*aj,*ajj,*adiag,M=A->rmap->n;
167   PetscInt           nz,rnz,i,j;
168   const PetscScalar  *av,*v1;
169   PetscScalar        *val;
170   PetscErrorCode     ierr;
171   PetscInt           *row,*col;
172   Mat_SeqSBAIJ       *aa=(Mat_SeqSBAIJ*)A->data;
173 
174   PetscFunctionBegin;
175   ai=aa->i; aj=aa->j;av=aa->a;
176   adiag=aa->diag;
177   if (reuse == MAT_INITIAL_MATRIX){
178     nz = M + (aa->nz-M)/2;
179     *nnz = nz;
180     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
181     col  = row + nz;
182     val  = (PetscScalar*)(col + nz);
183 
184     nz = 0;
185     for(i=0; i<M; i++) {
186       rnz = ai[i+1] - adiag[i];
187       ajj  = aj + adiag[i];
188       v1   = av + adiag[i];
189       for(j=0; j<rnz; j++) {
190 	row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
191       }
192     }
193     *r = row; *c = col; *v = val;
194   } else {
195     nz = 0; val = *v;
196     for(i=0; i <M; i++) {
197       rnz = ai[i+1] - adiag[i];
198       ajj = aj + adiag[i];
199       v1  = av + adiag[i];
200       for(j=0; j<rnz; j++) {
201 	val[nz++] = v1[j];
202       }
203     }
204   }
205   PetscFunctionReturn(0);
206 }
207 
#undef __FUNCT__
#define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/*
  Convert the local part of an MPISBAIJ matrix (diagonal block A, stored
  upper-triangular, plus off-diagonal block B) into MUMPS triples with
  global indices.  On reuse only the value array is refilled; the row/col
  arrays built on the initial call keep the same layout.
*/
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt     *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode     ierr;
  PetscInt           rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt           *row,*col;
  const PetscScalar  *av, *bv,*v1,*v2;
  PetscScalar        *val;
  Mat_MPISBAIJ       *mat =  (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ       *aa=(Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ        *bb=(Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  garray = mat->garray;                    /* compressed -> global column map for B */
  av=aa->a; bv=bb->a;

  if (reuse == MAT_INITIAL_MATRIX){
    nz = aa->nz + bb->nz;
    *nnz = nz;
    /* one allocation for the two index arrays and the value array */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v;
  }

  jj = 0; irow = rstart;                   /* global index of the current row */
  for ( i=0; i<m; i++ ) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: local columns become global by adding rstart */
    for (j=0; j<countA; j++){
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: columns are mapped to global indices through garray */
    for(j=0; j < countB; j++){
      if (reuse == MAT_INITIAL_MATRIX) {
	row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
267 
268 #undef __FUNCT__
269 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
270 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
271 {
272   const PetscInt     *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
273   PetscErrorCode     ierr;
274   PetscInt           rstart,nz,i,j,jj,irow,countA,countB;
275   PetscInt           *row,*col;
276   const PetscScalar  *av, *bv,*v1,*v2;
277   PetscScalar        *val;
278   Mat_MPIAIJ         *mat =  (Mat_MPIAIJ*)A->data;
279   Mat_SeqAIJ         *aa=(Mat_SeqAIJ*)(mat->A)->data;
280   Mat_SeqAIJ         *bb=(Mat_SeqAIJ*)(mat->B)->data;
281 
282   PetscFunctionBegin;
283   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
284   garray = mat->garray;
285   av=aa->a; bv=bb->a;
286 
287   if (reuse == MAT_INITIAL_MATRIX){
288     nz = aa->nz + bb->nz;
289     *nnz = nz;
290     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
291     col  = row + nz;
292     val  = (PetscScalar*)(col + nz);
293 
294     *r = row; *c = col; *v = val;
295   } else {
296     row = *r; col = *c; val = *v;
297   }
298 
299   jj = 0; irow = rstart;
300   for ( i=0; i<m; i++ ) {
301     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
302     countA = ai[i+1] - ai[i];
303     countB = bi[i+1] - bi[i];
304     bjj    = bj + bi[i];
305     v1     = av + ai[i];
306     v2     = bv + bi[i];
307 
308     /* A-part */
309     for (j=0; j<countA; j++){
310       if (reuse == MAT_INITIAL_MATRIX){
311         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
312       }
313       val[jj++] = v1[j];
314     }
315 
316     /* B-part */
317     for(j=0; j < countB; j++){
318       if (reuse == MAT_INITIAL_MATRIX){
319 	row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
320       }
321       val[jj++] = v2[j];
322     }
323     irow++;
324   }
325   PetscFunctionReturn(0);
326 }
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
330 PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
331 {
332   Mat_MPIBAIJ        *mat =  (Mat_MPIBAIJ*)A->data;
333   Mat_SeqBAIJ        *aa=(Mat_SeqBAIJ*)(mat->A)->data;
334   Mat_SeqBAIJ        *bb=(Mat_SeqBAIJ*)(mat->B)->data;
335   const PetscInt     *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
336   const PetscInt     *garray = mat->garray,mbs=mat->mbs,rstartbs=mat->rstartbs;
337   const PetscInt     bs = A->rmap->bs,bs2=mat->bs2;
338   PetscErrorCode     ierr;
339   PetscInt           nz,i,j,k,n,jj,irow,countA,countB,idx;
340   PetscInt           *row,*col;
341   const PetscScalar  *av=aa->a, *bv=bb->a,*v1,*v2;
342   PetscScalar        *val;
343 
344   PetscFunctionBegin;
345 
346   if (reuse == MAT_INITIAL_MATRIX) {
347     nz = bs2*(aa->nz + bb->nz);
348     *nnz = nz;
349     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
350     col  = row + nz;
351     val  = (PetscScalar*)(col + nz);
352 
353     *r = row; *c = col; *v = val;
354   } else {
355     row = *r; col = *c; val = *v;
356   }
357 
358   jj = 0; irow = rstartbs;
359   for ( i=0; i<mbs; i++ ) {
360     countA = ai[i+1] - ai[i];
361     countB = bi[i+1] - bi[i];
362     ajj    = aj + ai[i];
363     bjj    = bj + bi[i];
364     v1     = av + bs2*ai[i];
365     v2     = bv + bs2*bi[i];
366 
367     idx = 0;
368     /* A-part */
369     for (k=0; k<countA; k++){
370       for (j=0; j<bs; j++) {
371 	for (n=0; n<bs; n++) {
372 	  if (reuse == MAT_INITIAL_MATRIX){
373 	    row[jj] = bs*irow + n + shift;
374 	    col[jj] = bs*(rstartbs + ajj[k]) + j + shift;
375 	  }
376 	  val[jj++] = v1[idx++];
377 	}
378       }
379     }
380 
381     idx = 0;
382     /* B-part */
383     for(k=0; k<countB; k++){
384       for (j=0; j<bs; j++) {
385 	for (n=0; n<bs; n++) {
386 	  if (reuse == MAT_INITIAL_MATRIX){
387 	    row[jj] = bs*irow + n + shift;
388 	    col[jj] = bs*(garray[bjj[k]]) + j + shift;
389 	  }
390 	  val[jj++] = bv[idx++];
391 	}
392       }
393     }
394     irow++;
395   }
396   PetscFunctionReturn(0);
397 }
398 
399 #undef __FUNCT__
400 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
401 PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
402 {
403   const PetscInt     *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
404   PetscErrorCode     ierr;
405   PetscInt           rstart,nz,nza,nzb_low,i,j,jj,irow,countA,countB;
406   PetscInt           *row,*col;
407   const PetscScalar  *av, *bv,*v1,*v2;
408   PetscScalar        *val;
409   Mat_MPIAIJ         *mat =  (Mat_MPIAIJ*)A->data;
410   Mat_SeqAIJ         *aa=(Mat_SeqAIJ*)(mat->A)->data;
411   Mat_SeqAIJ         *bb=(Mat_SeqAIJ*)(mat->B)->data;
412 
413   PetscFunctionBegin;
414   ai=aa->i; aj=aa->j; adiag=aa->diag;
415   bi=bb->i; bj=bb->j; garray = mat->garray;
416   av=aa->a; bv=bb->a;
417   rstart = A->rmap->rstart;
418 
419   if (reuse == MAT_INITIAL_MATRIX) {
420     nza = 0;nzb_low = 0;
421     for(i=0; i<m; i++){
422       nza     = nza + (ai[i+1] - adiag[i]);
423       countB  = bi[i+1] - bi[i];
424       bjj     = bj + bi[i];
425 
426       j = 0;
427       while(garray[bjj[j]] < rstart) {
428 	if(j == countB) break;
429 	j++;nzb_low++;
430       }
431     }
432     /* Total nz = nz for the upper triangular A part + nz for the 2nd B part */
433     nz = nza + (bb->nz - nzb_low);
434     *nnz = nz;
435     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
436     col  = row + nz;
437     val  = (PetscScalar*)(col + nz);
438 
439     *r = row; *c = col; *v = val;
440   } else {
441     row = *r; col = *c; val = *v;
442   }
443 
444   jj = 0; irow = rstart;
445   for ( i=0; i<m; i++ ) {
446     ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
447     v1     = av + adiag[i];
448     countA = ai[i+1] - adiag[i];
449     countB = bi[i+1] - bi[i];
450     bjj    = bj + bi[i];
451     v2     = bv + bi[i];
452 
453      /* A-part */
454     for (j=0; j<countA; j++){
455       if (reuse == MAT_INITIAL_MATRIX) {
456         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
457       }
458       val[jj++] = v1[j];
459     }
460 
461     /* B-part */
462     for(j=0; j < countB; j++){
463       if (garray[bjj[j]] > rstart) {
464 	if (reuse == MAT_INITIAL_MATRIX) {
465 	  row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
466 	}
467 	val[jj++] = v2[j];
468       }
469     }
470     irow++;
471   }
472   PetscFunctionReturn(0);
473 }
474 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroy a MUMPS factored matrix: release the interface's work objects,
  terminate the MUMPS instance (JOB_END), free the duplicated
  communicator, then chain to the destroy routine of the wrapped type.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;
  PetscMPIInt    size=lu->size;

  PetscFunctionBegin;
  if (lu->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    if (size > 1){
      ierr = PetscFree2(lu->id.sol_loc,lu->id.isol_loc);CHKERRQ(ierr);
      ierr = VecScatterDestroy(lu->scat_rhs);CHKERRQ(ierr);
      ierr = VecDestroy(lu->b_seq);CHKERRQ(ierr);
      /* scat_sol/x_seq only exist once at least one solve has been done */
      if (lu->nSolve && lu->scat_sol){ierr = VecScatterDestroy(lu->scat_sol);CHKERRQ(ierr);}
      if (lu->nSolve && lu->x_seq){ierr = VecDestroy(lu->x_seq);CHKERRQ(ierr);}
    }
    /* jcn (and, for symmetric conversions, val) share this allocation with irn */
    ierr = PetscFree(lu->irn);CHKERRQ(ierr);
    lu->id.job=JOB_END;
#if defined(PETSC_USE_COMPLEX)
    zmumps_c(&lu->id);
#else
    dmumps_c(&lu->id);
#endif
    ierr = MPI_Comm_free(&(lu->comm_mumps));CHKERRQ(ierr);
  }
  /* clear composed functions */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatFactorGetSolverPackage_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatSetMumpsIcntl_C","",PETSC_NULL);CHKERRQ(ierr);
  /* chain to the destroy routine saved when the factor was created */
  ierr = (lu->MatDestroy)(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
508 
509 #undef __FUNCT__
510 #define __FUNCT__ "MatSolve_MUMPS"
511 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
512 {
513   Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
514   PetscScalar    *array;
515   Vec            b_seq;
516   IS             is_iden,is_petsc;
517   PetscErrorCode ierr;
518   PetscInt       i;
519 
520   PetscFunctionBegin;
521   lu->id.nrhs = 1;
522   b_seq = lu->b_seq;
523   if (lu->size > 1){
524     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
525     ierr = VecScatterBegin(lu->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
526     ierr = VecScatterEnd(lu->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
527     if (!lu->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
528   } else {  /* size == 1 */
529     ierr = VecCopy(b,x);CHKERRQ(ierr);
530     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
531   }
532   if (!lu->myid) { /* define rhs on the host */
533     lu->id.nrhs = 1;
534 #if defined(PETSC_USE_COMPLEX)
535     lu->id.rhs = (mumps_double_complex*)array;
536 #else
537     lu->id.rhs = array;
538 #endif
539   }
540 
541   /* solve phase */
542   /*-------------*/
543   lu->id.job = 3;
544 #if defined(PETSC_USE_COMPLEX)
545   zmumps_c(&lu->id);
546 #else
547   dmumps_c(&lu->id);
548 #endif
549   if (lu->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",lu->id.INFOG(1));
550 
551   if (lu->size > 1) { /* convert mumps distributed solution to petsc mpi x */
552     if (!lu->nSolve){ /* create scatter scat_sol */
553       ierr = ISCreateStride(PETSC_COMM_SELF,lu->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
554       for (i=0; i<lu->id.lsol_loc; i++){
555         lu->id.isol_loc[i] -= 1; /* change Fortran style to C style */
556       }
557       ierr = ISCreateGeneral(PETSC_COMM_SELF,lu->id.lsol_loc,lu->id.isol_loc,&is_petsc);CHKERRQ(ierr);  /* to */
558       ierr = VecScatterCreate(lu->x_seq,is_iden,x,is_petsc,&lu->scat_sol);CHKERRQ(ierr);
559       ierr = ISDestroy(is_iden);CHKERRQ(ierr);
560       ierr = ISDestroy(is_petsc);CHKERRQ(ierr);
561     }
562     ierr = VecScatterBegin(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
563     ierr = VecScatterEnd(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
564   }
565   lu->nSolve++;
566   PetscFunctionReturn(0);
567 }
568 
569 #if !defined(PETSC_USE_COMPLEX)
570 /*
571   input:
572    F:        numeric factor
573   output:
574    nneg:     total number of negative pivots
575    nzero:    0
576    npos:     (global dimension of F) - nneg
577 */
578 
579 #undef __FUNCT__
580 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
581 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
582 {
583   Mat_MUMPS      *lu =(Mat_MUMPS*)F->spptr;
584   PetscErrorCode ierr;
585   PetscMPIInt    size;
586 
587   PetscFunctionBegin;
588   ierr = MPI_Comm_size(((PetscObject)F)->comm,&size);CHKERRQ(ierr);
589   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
590   if (size > 1 && lu->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",lu->id.INFOG(13));
591   if (nneg){
592     if (!lu->myid){
593       *nneg = lu->id.INFOG(12);
594     }
595     ierr = MPI_Bcast(nneg,1,MPI_INT,0,lu->comm_mumps);CHKERRQ(ierr);
596   }
597   if (nzero) *nzero = 0;
598   if (npos)  *npos  = F->rmap->N - (*nneg);
599   PetscFunctionReturn(0);
600 }
601 #endif /* !defined(PETSC_USE_COMPLEX) */
602 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Numerical factorization phase (MUMPS job=2).  The matrix values are
  refreshed into the triples built in the symbolic phase, handed to MUMPS
  either centrally (ICNTL(18)=0) or distributed, and the factorization is
  run.  For parallel runs the distributed-solution work objects are
  rebuilt for use by MatSolve_MUMPS().
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS       *lu =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode  ierr;
  MatReuse        reuse;
  Mat             F_diag;
  PetscTruth      isMPIAIJ;

  PetscFunctionBegin;
  /* refresh values only; the index arrays from the symbolic phase are reused */
  reuse = MAT_REUSE_MATRIX;
  ierr = (*lu->ConvertToTriples)(A, 1, reuse, &lu->nz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  lu->id.job = 2;
  if(!lu->id.ICNTL(18)) {
    /* centralized input: only the host passes the values */
    if (!lu->myid) {
#if defined(PETSC_USE_COMPLEX)
      lu->id.a = (mumps_double_complex*)lu->val;
#else
      lu->id.a = lu->val;
#endif
    }
  } else {
    /* distributed input: every process passes its local values */
#if defined(PETSC_USE_COMPLEX)
    lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
    lu->id.a_loc = lu->val;
#endif
  }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) {
    /* INFO(1)=-13 is MUMPS's allocation-failure code; INFO(2) carries the size */
    if (lu->id.INFO(1) == -13) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",lu->id.INFO(2));
    else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",lu->id.INFO(1),lu->id.INFO(2));
  }
  /* NOTE(review): the test reads ICNTL(16) but the message prints INFOG(16) -- confirm which is intended */
  if (!lu->myid && lu->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  lu->id.ICNTL(16):=%d\n",lu->id.INFOG(16));

  if (lu->size > 1){
    /* mark the diagonal block of the parallel factor as assembled */
    ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if(isMPIAIJ) {
      F_diag = ((Mat_MPIAIJ *)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ *)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
    /* drop solve-phase objects created for the previous factorization */
    if (lu->nSolve){
      ierr = VecScatterDestroy(lu->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(lu->id.sol_loc,lu->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(lu->x_seq);CHKERRQ(ierr);
    }
  }
  (F)->assembled   = PETSC_TRUE;
  lu->matstruc     = SAME_NONZERO_PATTERN;
  lu->CleanUpMUMPS = PETSC_TRUE;
  lu->nSolve       = 0;

  if (lu->size > 1){
    /* distributed solution */
    lu->id.ICNTL(21) = 1;
    /* nSolve was just reset above, so this branch always runs: x_seq is rebuilt after every numerical factorization */
    if (!lu->nSolve){
      /* Create x_seq=sol_loc for repeated use */
      PetscInt    lsol_loc;
      PetscScalar *sol_loc;
      lsol_loc = lu->id.INFO(23); /* length of sol_loc */
      ierr = PetscMalloc2(lsol_loc,PetscScalar,&sol_loc,lsol_loc,PetscInt,&lu->id.isol_loc);CHKERRQ(ierr);
      lu->id.lsol_loc = lsol_loc;
#if defined(PETSC_USE_COMPLEX)
      lu->id.sol_loc  = (mumps_double_complex*)sol_loc;
#else
      lu->id.sol_loc  = sol_loc;
#endif
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,lsol_loc,sol_loc,&lu->x_seq);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
685 
#undef __FUNCT__
#define __FUNCT__ "PetscSetMUMPSOptions"
/*
  Read the -mat_mumps_* runtime options and load them into the MUMPS
  control arrays (ICNTL/CNTL) of the factor F.  The matrix input mode
  (centralized vs distributed) is chosen from the communicator size.
*/
PetscErrorCode PetscSetMUMPSOptions(Mat F, Mat A)
{
  Mat_MUMPS        *lu = (Mat_MUMPS*)F->spptr;
  PetscErrorCode   ierr;
  PetscInt         icntl;
  PetscTruth       flg;

  PetscFunctionBegin;
  ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsTruth("-mat_mumps_view","View MUMPS parameters","None",lu->mumpsview,&lu->mumpsview,PETSC_NULL);CHKERRQ(ierr);
  if (lu->size == 1){
    lu->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
  } else {
    lu->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
  }

  icntl=-1;
  lu->id.ICNTL(4) = 0;  /* level of printing; overwrite mumps default ICNTL(4)=2 */
  ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",lu->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
  if ((flg && icntl > 0) || PetscLogPrintInfo) {
    lu->id.ICNTL(4)=icntl; /* and use mumps default icntl(i), i=1,2,3 */
  } else { /* no output */
    lu->id.ICNTL(1) = 0;  /* error message, default= 6 */
    lu->id.ICNTL(2) = 0;  /* output stream for diagnostic printing, statistics, and warning. default=0 */
    lu->id.ICNTL(3) = 0; /* output stream for global information, default=6 */
  }
  ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): column permutation and/or scaling to get a zero-free diagonal (0 to 7)","None",lu->id.ICNTL(6),&lu->id.ICNTL(6),PETSC_NULL);CHKERRQ(ierr);
  icntl=-1;
  ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7)","None",lu->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
  if (flg) {
    /* ICNTL(7)=1 would require a user-supplied permutation (PERM_IN), which this interface does not pass */
    if (icntl== 1){
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
    } else {
      lu->id.ICNTL(7) = icntl;
    }
  }
  ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 7 or 77)","None",lu->id.ICNTL(8),&lu->id.ICNTL(8),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): A or A^T x=b to be solved. 1: A; otherwise: A^T","None",lu->id.ICNTL(9),&lu->id.ICNTL(9),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",lu->id.ICNTL(10),&lu->id.ICNTL(10),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to the linear system solved (via -ksp_view)","None",lu->id.ICNTL(11),&lu->id.ICNTL(11),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control: defines the ordering strategy with scaling constraints (0 to 3","None",lu->id.ICNTL(12),&lu->id.ICNTL(12),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control: with or without ScaLAPACK","None",lu->id.ICNTL(13),&lu->id.ICNTL(13),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",lu->id.ICNTL(14),&lu->id.ICNTL(14),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): Schur complement","None",lu->id.ICNTL(19),&lu->id.ICNTL(19),PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core facility (0 or 1)","None",lu->id.ICNTL(22),&lu->id.ICNTL(22),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",lu->id.ICNTL(23),&lu->id.ICNTL(23),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",lu->id.ICNTL(24),&lu->id.ICNTL(24),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computation of a null space basis","None",lu->id.ICNTL(25),&lu->id.ICNTL(25),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): Schur options for right-hand side or solution vector","None",lu->id.ICNTL(26),&lu->id.ICNTL(26),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): experimental parameter","None",lu->id.ICNTL(27),&lu->id.ICNTL(27),PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",lu->id.CNTL(1),&lu->id.CNTL(1),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",lu->id.CNTL(2),&lu->id.CNTL(2),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",lu->id.CNTL(3),&lu->id.CNTL(3),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",lu->id.CNTL(4),&lu->id.CNTL(4),PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",lu->id.CNTL(5),&lu->id.CNTL(5),PETSC_NULL);CHKERRQ(ierr);
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
748 
749 #undef __FUNCT__
750 #define __FUNCT__ "PetscInitializeMUMPS"
751 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS* mumps)
752 {
753   PetscErrorCode  ierr;
754   PetscInt        icntl;
755   PetscTruth      flg;
756 
757   PetscFunctionBegin;
758   ierr = MPI_Comm_rank(((PetscObject)A)->comm, &mumps->myid);
759   ierr = MPI_Comm_size(((PetscObject)A)->comm,&mumps->size);CHKERRQ(ierr);
760   ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(mumps->comm_mumps));CHKERRQ(ierr);
761   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
762 
763   mumps->id.job = JOB_INIT;
764   mumps->id.par = 1;  /* host participates factorizaton and solve */
765   mumps->id.sym = mumps->sym;
766   if (mumps->sym == 2){
767     ierr = PetscOptionsInt("-mat_mumps_sym","SYM: (1,2)","None",mumps->id.sym,&icntl,&flg);CHKERRQ(ierr);
768     if (flg && icntl == 1) mumps->id.sym=icntl;  /* matrix is spd */
769   }
770 #if defined(PETSC_USE_COMPLEX)
771   zmumps_c(&mumps->id);
772 #else
773   dmumps_c(&mumps->id);
774 #endif
775 
776   mumps->CleanUpMUMPS = PETSC_FALSE;
777   mumps->scat_rhs     = PETSC_NULL;
778   mumps->scat_sol     = PETSC_NULL;
779   mumps->nSolve       = 0;
780   PetscFunctionReturn(0);
781 }
782 
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
  Symbolic (analysis) phase for LU with MUMPS (job=1): set the runtime
  options, build the triples, describe the matrix to MUMPS (centralized
  or distributed input), create the rhs scatter for the distributed case,
  and run the analysis.  Installs the numeric and solve callbacks on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS          *lu = (Mat_MUMPS*)F->spptr;
  PetscErrorCode     ierr;
  MatReuse           reuse;
  Vec                b;
  IS                 is_iden;
  const PetscInt     M = A->rmap->N;

  PetscFunctionBegin;
  lu->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options */
  ierr = PetscSetMUMPSOptions(F,A);CHKERRQ(ierr);

  /* build the triples (Fortran-style 1-based indices, hence shift=1) */
  reuse = MAT_INITIAL_MATRIX;
  ierr = (*lu->ConvertToTriples)(A, 1, reuse, &lu->nz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  lu->id.job = 1;
  lu->id.n = M;
  switch (lu->id.ICNTL(18)){
  case 0:  /* centralized assembled matrix input */
    if (!lu->myid) {
      lu->id.nz =lu->nz; lu->id.irn=lu->irn; lu->id.jcn=lu->jcn;
      /* ICNTL(6)>1: MUMPS needs the values already at analysis time */
      if (lu->id.ICNTL(6)>1){
#if defined(PETSC_USE_COMPLEX)
        lu->id.a = (mumps_double_complex*)lu->val;
#else
        lu->id.a = lu->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    lu->id.nz_loc = lu->nz;
    lu->id.irn_loc=lu->irn; lu->id.jcn_loc=lu->jcn;
    if (lu->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
      lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
      lu->id.a_loc = lu->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!lu->myid){
      /* host receives the full rhs; other ranks contribute into an empty vector */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    /* template parallel vector used only to build the scatter layout */
    ierr = VecCreate(((PetscObject)A)->comm,&b);CHKERRQ(ierr);
    ierr = VecSetSizes(b,A->rmap->n,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetFromOptions(b);CHKERRQ(ierr);

    ierr = VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(b);CHKERRQ(ierr);
    break;
    /* NOTE(review): no default case -- other ICNTL(18) values fall through silently; confirm intended */
    }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",lu->id.INFOG(1));

  F->ops->lufactornumeric  = MatFactorNumeric_MUMPS;
  F->ops->solve            = MatSolve_MUMPS;
  PetscFunctionReturn(0);
}
859 
/* Note the Petsc r and c permutations are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
/*
   Symbolic LU factorization of a BAIJ matrix via MUMPS (analysis phase, job=1).
   NOTE(review): the body is byte-identical to MatLUFactorSymbolic_AIJMUMPS
   above; only the ConvertToTriples callback, installed in
   MatGetFactor_baij_mumps, differs.  Candidate for de-duplication.
*/
PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{

  Mat_MUMPS       *lu = (Mat_MUMPS*)F->spptr;
  PetscErrorCode  ierr;
  MatReuse        reuse;
  Vec             b;
  IS              is_iden;
  const PetscInt  M = A->rmap->N;

  PetscFunctionBegin;
  lu->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options */
  ierr = PetscSetMUMPSOptions(F,A);CHKERRQ(ierr);

  /* Convert A to Fortran-indexed (shift=1) triples lu->irn/jcn/val */
  reuse = MAT_INITIAL_MATRIX;
  ierr = (*lu->ConvertToTriples)(A, 1, reuse, &lu->nz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  lu->id.job = 1;
  lu->id.n = M;
  switch (lu->id.ICNTL(18)){
  case 0:  /* centralized assembled matrix input */
    if (!lu->myid) {
      lu->id.nz =lu->nz; lu->id.irn=lu->irn; lu->id.jcn=lu->jcn;
      if (lu->id.ICNTL(6)>1){
#if defined(PETSC_USE_COMPLEX)
        lu->id.a = (mumps_double_complex*)lu->val;
#else
        lu->id.a = lu->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    lu->id.nz_loc = lu->nz;
    lu->id.irn_loc=lu->irn; lu->id.jcn_loc=lu->jcn;
    if (lu->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
      lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
      lu->id.a_loc = lu->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!lu->myid){
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = VecCreate(((PetscObject)A)->comm,&b);CHKERRQ(ierr);
    ierr = VecSetSizes(b,A->rmap->n,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetFromOptions(b);CHKERRQ(ierr);

    ierr = VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(b);CHKERRQ(ierr);
    break;
    }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",lu->id.INFOG(1));

  F->ops->lufactornumeric  = MatFactorNumeric_MUMPS;
  F->ops->solve            = MatSolve_MUMPS;
  PetscFunctionReturn(0);
}
937 
/* Note the Petsc r permutation is ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   Symbolic Cholesky factorization via MUMPS (analysis phase, job=1).
   Mirrors MatLUFactorSymbolic_AIJMUMPS above, except that on success it
   installs choleskyfactornumeric (and, in the real case, getinertia).
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS          *lu = (Mat_MUMPS*)F->spptr;
  PetscErrorCode     ierr;
  MatReuse           reuse;
  Vec                b;
  IS                 is_iden;
  const PetscInt     M = A->rmap->N;

  PetscFunctionBegin;
  lu->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options */
  ierr = PetscSetMUMPSOptions(F,A);CHKERRQ(ierr);

  /* Convert A to Fortran-indexed (shift=1) triples lu->irn/jcn/val */
  reuse = MAT_INITIAL_MATRIX;
  ierr = (*lu->ConvertToTriples)(A, 1 , reuse, &lu->nz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  lu->id.job = 1;
  lu->id.n = M;
  switch (lu->id.ICNTL(18)){
  case 0:  /* centralized assembled matrix input */
    if (!lu->myid) {
      lu->id.nz =lu->nz; lu->id.irn=lu->irn; lu->id.jcn=lu->jcn;
      if (lu->id.ICNTL(6)>1){
#if defined(PETSC_USE_COMPLEX)
        lu->id.a = (mumps_double_complex*)lu->val;
#else
        lu->id.a = lu->val;
#endif
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    lu->id.nz_loc = lu->nz;
    lu->id.irn_loc=lu->irn; lu->id.jcn_loc=lu->jcn;
    if (lu->id.ICNTL(6)>1) {
#if defined(PETSC_USE_COMPLEX)
      lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
      lu->id.a_loc = lu->val;
#endif
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!lu->myid){
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = VecCreate(((PetscObject)A)->comm,&b);CHKERRQ(ierr);
    ierr = VecSetSizes(b,A->rmap->n,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetFromOptions(b);CHKERRQ(ierr);

    ierr = VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(b);CHKERRQ(ierr);
    break;
    }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",lu->id.INFOG(1));

  F->ops->choleskyfactornumeric =  MatFactorNumeric_MUMPS;
  F->ops->solve                 =  MatSolve_MUMPS;
#if !defined(PETSC_USE_COMPLEX)
  /* inertia is only meaningful for real symmetric factorizations */
  (F)->ops->getinertia          =  MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}
1017 
1018 #undef __FUNCT__
1019 #define __FUNCT__ "MatFactorInfo_MUMPS"
1020 PetscErrorCode MatFactorInfo_MUMPS(Mat A,PetscViewer viewer)
1021 {
1022   Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
1023   PetscErrorCode ierr;
1024 
1025   PetscFunctionBegin;
1026   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):        %d \n",lu->id.ICNTL(1));CHKERRQ(ierr);
1027   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg):%d \n",lu->id.ICNTL(2));CHKERRQ(ierr);
1028   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):  %d \n",lu->id.ICNTL(3));CHKERRQ(ierr);
1029   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):       %d \n",lu->id.ICNTL(4));CHKERRQ(ierr);
1030   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):        %d \n",lu->id.ICNTL(5));CHKERRQ(ierr);
1031   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):       %d \n",lu->id.ICNTL(6));CHKERRQ(ierr);
1032   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (matrix ordering):         %d \n",lu->id.ICNTL(7));CHKERRQ(ierr);
1033   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):       %d \n",lu->id.ICNTL(8));CHKERRQ(ierr);
1034   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(9) (A/A^T x=b is solved):     %d \n",lu->id.ICNTL(9));CHKERRQ(ierr);
1035   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements): %d \n",lu->id.ICNTL(10));CHKERRQ(ierr);
1036   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):         %d \n",lu->id.ICNTL(11));CHKERRQ(ierr);
1037   if (lu->id.ICNTL(11)>0) {
1038     ierr = PetscViewerASCIIPrintf(viewer,"        RINFOG(4) (inf norm of input mat):        %g\n",lu->id.RINFOG(4));CHKERRQ(ierr);
1039     ierr = PetscViewerASCIIPrintf(viewer,"        RINFOG(5) (inf norm of solution):         %g\n",lu->id.RINFOG(5));CHKERRQ(ierr);
1040     ierr = PetscViewerASCIIPrintf(viewer,"        RINFOG(6) (inf norm of residual):         %g\n",lu->id.RINFOG(6));CHKERRQ(ierr);
1041     ierr = PetscViewerASCIIPrintf(viewer,"        RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",lu->id.RINFOG(7),lu->id.RINFOG(8));CHKERRQ(ierr);
1042     ierr = PetscViewerASCIIPrintf(viewer,"        RINFOG(9) (error estimate):               %g \n",lu->id.RINFOG(9));CHKERRQ(ierr);
1043     ierr = PetscViewerASCIIPrintf(viewer,"        RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",lu->id.RINFOG(10),lu->id.RINFOG(11));CHKERRQ(ierr);
1044 
1045   }
1046   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",lu->id.ICNTL(12));CHKERRQ(ierr);
1047   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",lu->id.ICNTL(13));CHKERRQ(ierr);
1048   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",lu->id.ICNTL(14));CHKERRQ(ierr);
1049   /* ICNTL(15-17) not used */
1050   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",lu->id.ICNTL(18));CHKERRQ(ierr);
1051   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",lu->id.ICNTL(19));CHKERRQ(ierr);
1052   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",lu->id.ICNTL(20));CHKERRQ(ierr);
1053   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",lu->id.ICNTL(21));CHKERRQ(ierr);
1054   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",lu->id.ICNTL(22));CHKERRQ(ierr);
1055   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",lu->id.ICNTL(23));CHKERRQ(ierr);
1056 
1057   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",lu->id.ICNTL(24));CHKERRQ(ierr);
1058   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",lu->id.ICNTL(25));CHKERRQ(ierr);
1059   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",lu->id.ICNTL(26));CHKERRQ(ierr);
1060   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",lu->id.ICNTL(27));CHKERRQ(ierr);
1061 
1062   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",lu->id.CNTL(1));CHKERRQ(ierr);
1063   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",lu->id.CNTL(2));CHKERRQ(ierr);
1064   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",lu->id.CNTL(3));CHKERRQ(ierr);
1065   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",lu->id.CNTL(4));CHKERRQ(ierr);
1066   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",lu->id.CNTL(5));CHKERRQ(ierr);
1067 
1068   /* infomation local to each processor */
1069   ierr = PetscViewerASCIIPrintf(viewer, "              RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1070   ierr = PetscViewerASCIISynchronizedPrintf(viewer,"              [%d] %g \n",lu->myid,lu->id.RINFO(1));CHKERRQ(ierr);
1071   ierr = PetscViewerFlush(viewer);
1072   ierr = PetscViewerASCIIPrintf(viewer, "              RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1073   ierr = PetscViewerASCIISynchronizedPrintf(viewer,"              [%d]  %g \n",lu->myid,lu->id.RINFO(2));CHKERRQ(ierr);
1074   ierr = PetscViewerFlush(viewer);
1075   ierr = PetscViewerASCIIPrintf(viewer, "              RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1076   ierr = PetscViewerASCIISynchronizedPrintf(viewer,"              [%d]  %g \n",lu->myid,lu->id.RINFO(3));CHKERRQ(ierr);
1077   ierr = PetscViewerFlush(viewer);
1078 
1079   ierr = PetscViewerASCIIPrintf(viewer, "              INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1080   ierr = PetscViewerASCIISynchronizedPrintf(viewer,"              [%d] %d \n",lu->myid,lu->id.INFO(15));CHKERRQ(ierr);
1081   ierr = PetscViewerFlush(viewer);
1082 
1083   ierr = PetscViewerASCIIPrintf(viewer, "              INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1084   ierr = PetscViewerASCIISynchronizedPrintf(viewer,"              [%d] %d \n",lu->myid,lu->id.INFO(16));CHKERRQ(ierr);
1085   ierr = PetscViewerFlush(viewer);
1086 
1087   ierr = PetscViewerASCIIPrintf(viewer, "              INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1088   ierr = PetscViewerASCIISynchronizedPrintf(viewer,"              [%d] %d \n",lu->myid,lu->id.INFO(23));CHKERRQ(ierr);
1089   ierr = PetscViewerFlush(viewer);
1090 
1091   if (!lu->myid){ /* information from the host */
1092     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",lu->id.RINFOG(1));CHKERRQ(ierr);
1093     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",lu->id.RINFOG(2));CHKERRQ(ierr);
1094     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",lu->id.RINFOG(3));CHKERRQ(ierr);
1095 
1096     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(3));CHKERRQ(ierr);
1097     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(4));CHKERRQ(ierr);
1098     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",lu->id.INFOG(5));CHKERRQ(ierr);
1099     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",lu->id.INFOG(6));CHKERRQ(ierr);
1100     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively uese after analysis): %d \n",lu->id.INFOG(7));CHKERRQ(ierr);
1101     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",lu->id.INFOG(8));CHKERRQ(ierr);
1102     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",lu->id.INFOG(9));CHKERRQ(ierr);
1103     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",lu->id.INFOG(10));CHKERRQ(ierr);
1104     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",lu->id.INFOG(11));CHKERRQ(ierr);
1105     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",lu->id.INFOG(12));CHKERRQ(ierr);
1106     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",lu->id.INFOG(13));CHKERRQ(ierr);
1107     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",lu->id.INFOG(14));CHKERRQ(ierr);
1108     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",lu->id.INFOG(15));CHKERRQ(ierr);
1109     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",lu->id.INFOG(16));CHKERRQ(ierr);
1110     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",lu->id.INFOG(17));CHKERRQ(ierr);
1111     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",lu->id.INFOG(18));CHKERRQ(ierr);
1112     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",lu->id.INFOG(19));CHKERRQ(ierr);
1113      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",lu->id.INFOG(20));CHKERRQ(ierr);
1114      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",lu->id.INFOG(21));CHKERRQ(ierr);
1115      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",lu->id.INFOG(22));CHKERRQ(ierr);
1116      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",lu->id.INFOG(23));CHKERRQ(ierr);
1117      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",lu->id.INFOG(24));CHKERRQ(ierr);
1118      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",lu->id.INFOG(25));CHKERRQ(ierr);
1119   }
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 #undef __FUNCT__
1124 #define __FUNCT__ "MatView_MUMPS"
1125 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1126 {
1127   PetscErrorCode    ierr;
1128   PetscTruth        iascii;
1129   PetscViewerFormat format;
1130   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1131 
1132   PetscFunctionBegin;
1133   /* check if matrix is mumps type */
1134   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1135 
1136   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1137   if (iascii) {
1138     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1139     if (format == PETSC_VIEWER_ASCII_INFO || mumps->mumpsview){
1140       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1141       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                  %d \n",mumps->id.sym);CHKERRQ(ierr);
1142       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):           %d \n",mumps->id.par);CHKERRQ(ierr);
1143       if (mumps->mumpsview){ /* View all MUMPS parameters */
1144         ierr = MatFactorInfo_MUMPS(A,viewer);CHKERRQ(ierr);
1145       }
1146     }
1147   }
1148   PetscFunctionReturn(0);
1149 }
1150 
1151 #undef __FUNCT__
1152 #define __FUNCT__ "MatGetInfo_MUMPS"
1153 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1154 {
1155   Mat_MUMPS  *mumps =(Mat_MUMPS*)A->spptr;
1156 
1157   PetscFunctionBegin;
1158   info->block_size        = 1.0;
1159   info->nz_allocated      = mumps->id.INFOG(20);
1160   info->nz_used           = mumps->id.INFOG(20);
1161   info->nz_unneeded       = 0.0;
1162   info->assemblies        = 0.0;
1163   info->mallocs           = 0.0;
1164   info->memory            = 0.0;
1165   info->fill_ratio_given  = 0;
1166   info->fill_ratio_needed = 0;
1167   info->factor_mallocs    = 0;
1168   PetscFunctionReturn(0);
1169 }
1170 
1171 /*MC
1172   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
1173   distributed and sequential matrices via the external package MUMPS.
1174 
  Works with MATAIJ and MATSBAIJ matrices; MATBAIJ is supported for LU factorization only
1176 
1177   Options Database Keys:
1178 + -mat_mumps_sym <0,1,2> - 0 the matrix is unsymmetric, 1 symmetric positive definite, 2 symmetric
1179 . -mat_mumps_icntl_4 <0,...,4> - print level
1180 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
1181 . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
1182 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
1183 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
1184 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
1185 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
1186 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
1187 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
1188 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
1189 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
1190 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
1191 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
1192 
1193   Level: beginner
1194 
1195 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
1196 
1197 M*/
1198 
1199 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Query hook composed on MUMPS factor matrices (under the name
   "MatFactorGetSolverPackage_C"): reports that this factorization is
   provided by the MUMPS solver package. */
PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
1208 EXTERN_C_END
1209 
1210 EXTERN_C_BEGIN
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/*
   Creates the factor matrix B for an AIJ input matrix A.  Supports both
   LU (ftype==MAT_FACTOR_LU, MUMPS sym=0) and Cholesky (sym=2, A converted
   to its upper triangle in sbaij-style triples).  B takes over destroy so
   MatDestroy_MUMPS can release the MUMPS instance before the base destroy.
*/
PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscTruth     isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
  B->ops->view             = MatView_MUMPS;
  B->ops->getinfo          = MatGetInfo_MUMPS;
  /* compose the query/option hooks looked up by PCFactor and MatSetMumpsIcntl */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSetMumpsIcntl_C","MatSetMumpsIcntl",MatSetMumpsIcntl);CHKERRQ(ierr);
  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0;   /* MUMPS: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
    mumps->sym = 2;   /* MUMPS: general symmetric */
  }

  mumps->isAIJ        = PETSC_TRUE;
  /* chain destroy: MatDestroy_MUMPS cleans MUMPS state, then calls the saved base destroy */
  mumps->MatDestroy   = B->ops->destroy;
  B->ops->destroy     = MatDestroy_MUMPS;
  B->spptr            = (void*)mumps;
  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
1260 }
1261 EXTERN_C_END
1262 
1263 
1264 EXTERN_C_BEGIN
1265 /* MatGetFactor for Seq and MPI SBAIJ matrices */
1266 #undef __FUNCT__
1267 #define __FUNCT__ "MatGetFactor_sbaij_mumps"
1268 PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
1269 {
1270   Mat            B;
1271   PetscErrorCode ierr;
1272   Mat_MUMPS      *mumps;
1273   PetscTruth     isSeqSBAIJ;
1274 
1275   PetscFunctionBegin;
1276   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
1277   if(A->rmap->bs > 1) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
1278   ierr = PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
1279   /* Create the factorization matrix */
1280   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1281   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1282   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1283   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
1284   if (isSeqSBAIJ) {
1285     ierr = MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);CHKERRQ(ierr);
1286     mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
1287   } else {
1288     ierr = MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1289     mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
1290   }
1291 
1292   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
1293   B->ops->view                   = MatView_MUMPS;
1294   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1295   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSetMumpsIcntl_C","MatSetMumpsIcntl",MatSetMumpsIcntl);CHKERRQ(ierr);
1296   B->factortype                  = MAT_FACTOR_CHOLESKY;
1297   mumps->sym = 2;
1298 
1299   mumps->isAIJ        = PETSC_FALSE;
1300   mumps->MatDestroy   = B->ops->destroy;
1301   B->ops->destroy     = MatDestroy_MUMPS;
1302   B->spptr            = (void*)mumps;
1303   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1304 
1305   *F = B;
1306   PetscFunctionReturn(0);
1307 }
1308 EXTERN_C_END
1309 
1310 EXTERN_C_BEGIN
1311 #undef __FUNCT__
1312 #define __FUNCT__ "MatGetFactor_baij_mumps"
1313 PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
1314 {
1315   Mat            B;
1316   PetscErrorCode ierr;
1317   Mat_MUMPS      *mumps;
1318   PetscTruth     isSeqBAIJ;
1319 
1320   PetscFunctionBegin;
1321   /* Create the factorization matrix */
1322   ierr = PetscTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
1323   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1324   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1325   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1326   if (isSeqBAIJ) {
1327     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,PETSC_NULL);CHKERRQ(ierr);
1328   } else {
1329     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1330   }
1331 
1332   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
1333   if (ftype == MAT_FACTOR_LU) {
1334     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
1335     B->factortype = MAT_FACTOR_LU;
1336     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
1337     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
1338     mumps->sym = 0;
1339   } else {
1340     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
1341   }
1342 
1343   B->ops->view             = MatView_MUMPS;
1344   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
1345   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSetMumpsIcntl_C","MatSetMumpsIcntl",MatSetMumpsIcntl);CHKERRQ(ierr);
1346 
1347   mumps->isAIJ        = PETSC_TRUE;
1348   mumps->MatDestroy   = B->ops->destroy;
1349   B->ops->destroy     = MatDestroy_MUMPS;
1350   B->spptr            = (void*)mumps;
1351   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
1352 
1353   *F = B;
1354   PetscFunctionReturn(0);
1355 }
1356 EXTERN_C_END
1357 
1358 /* -------------------------------------------------------------------------------------------*/
1359 #undef __FUNCT__
1360 #define __FUNCT__ "MatSeetMumpsIcntl"
1361 /*@
1362   MatSetMumpsIcntl - Set MUMPS parameter ICNTL()
1363 
1364    Collective on Mat
1365 
1366    Input Parameters:
1367 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1368 .  icntl - index of MUMPS parameter array ICNTL()
1369 -  ival - value of MUMPS ICNTL(icntl)
1370 
1371   Options Database:
1372 .   -mat_mumps_icntl_<icntl> <ival>
1373 
1374    Level: beginner
1375 
1376    References: MUMPS Users' Guide
1377 
1378 .seealso: MatGetFactor()
1379 @*/
1380 PetscErrorCode MatSetMumpsIcntl(Mat F,PetscInt icntl,PetscInt ival)
1381 {
1382   Mat_MUMPS      *lu =(Mat_MUMPS*)(F)->spptr;
1383 
1384   PetscFunctionBegin;
1385   lu->id.ICNTL(icntl) = ival;
1386   PetscFunctionReturn(0);
1387 }
1388 
1389