xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision e52336cb213eafb6e0bc1e6a0d4ebe153339e097)
1 
2 /*
3     Provides an interface to the MUMPS sparse solver
4 */
5 
6 #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I  "petscmat.h"  I*/
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 #include <petscblaslapack.h>
9 
10 EXTERN_C_BEGIN
11 #if defined(PETSC_USE_COMPLEX)
12 #if defined(PETSC_USE_REAL_SINGLE)
13 #include <cmumps_c.h>
14 #else
15 #include <zmumps_c.h>
16 #endif
17 #else
18 #if defined(PETSC_USE_REAL_SINGLE)
19 #include <smumps_c.h>
20 #else
21 #include <dmumps_c.h>
22 #endif
23 #endif
24 EXTERN_C_END
25 #define JOB_INIT -1
26 #define JOB_FACTSYMBOLIC 1
27 #define JOB_FACTNUMERIC 2
28 #define JOB_SOLVE 3
29 #define JOB_END -2
30 
31 /* calls to MUMPS */
32 #if defined(PETSC_USE_COMPLEX)
33 #if defined(PETSC_USE_REAL_SINGLE)
34 #define PetscMUMPS_c cmumps_c
35 #else
36 #define PetscMUMPS_c zmumps_c
37 #endif
38 #else
39 #if defined(PETSC_USE_REAL_SINGLE)
40 #define PetscMUMPS_c smumps_c
41 #else
42 #define PetscMUMPS_c dmumps_c
43 #endif
44 #endif
45 
46 /* declare MumpsScalar */
47 #if defined(PETSC_USE_COMPLEX)
48 #if defined(PETSC_USE_REAL_SINGLE)
49 #define MumpsScalar mumps_complex
50 #else
51 #define MumpsScalar mumps_double_complex
52 #endif
53 #else
54 #define MumpsScalar PetscScalar
55 #endif
56 
57 /* macros s.t. indices match MUMPS documentation */
58 #define ICNTL(I) icntl[(I)-1]
59 #define CNTL(I) cntl[(I)-1]
60 #define INFOG(I) infog[(I)-1]
61 #define INFO(I) info[(I)-1]
62 #define RINFOG(I) rinfog[(I)-1]
63 #define RINFO(I) rinfo[(I)-1]
64 
/* Private data for MUMPS-factored matrices: wraps the precision-dependent
   MUMPS C structure plus the PETSc-side state (matrix triples, solve
   scatters, and Schur-complement bookkeeping). */
typedef struct {
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
  CMUMPS_STRUC_C id;   /* single-precision complex MUMPS handle */
#else
  ZMUMPS_STRUC_C id;   /* double-precision complex MUMPS handle */
#endif
#else
#if defined(PETSC_USE_REAL_SINGLE)
  SMUMPS_STRUC_C id;   /* single-precision real MUMPS handle */
#else
  DMUMPS_STRUC_C id;   /* double-precision real MUMPS handle */
#endif
#endif

  MatStructure matstruc;             /* nonzero-pattern state between factorizations -- TODO confirm against callers */
  PetscMPIInt  myid,size;            /* rank and size, presumably within comm_mumps -- verify where set */
  PetscInt     *irn,*jcn,nz,sym;     /* matrix triples (row/col indices, local count) and symmetry flag */
  PetscScalar  *val;                 /* values matching the irn/jcn triples */
  MPI_Comm     comm_mumps;           /* communicator handed to MUMPS (freed in MatDestroy_MUMPS) */
  PetscBool    isAIJ;                /* NOTE(review): presumably distinguishes AIJ from SBAIJ input -- confirm */
  PetscInt     ICNTL9_pre;           /* check if ICNTL(9) is changed from previous MatSolve */
  VecScatter   scat_rhs, scat_sol;   /* used by MatSolve() */
  Vec          b_seq,x_seq;          /* sequential work vectors, presumably for gathering rhs/solution -- confirm in MatSolve */
  PetscInt     ninfo,*info;          /* display INFO */
  PetscBool    schur_second_solve;   /* set between the condensation and expansion passes of a Schur-reduced solve */
  PetscInt     sizeredrhs;           /* allocated size of id.redrhs (reduced right-hand sides) */
  PetscInt     *schur_pivots;        /* pivots from the GETRF/SYTRF factorization of the Schur complement */
  PetscInt     schur_B_lwork;        /* LAPACK workspace size for Schur factorization/inversion */
  PetscScalar  *schur_work;          /* LAPACK workspace for SYTRF/GETRI/SYTRI */
  PetscScalar  *schur_sol;           /* scratch buffer holding Schur solutions */
  PetscInt     schur_sizesol;        /* allocated size of schur_sol */
  PetscBool    schur_restored;       /* user has returned the Schur matrix (checked in MatMumpsResetSchur_Private) */
  PetscBool    schur_factored;       /* Schur complement has been factored with LAPACK */
  PetscBool    schur_inverted;       /* Schur complement has been explicitly inverted */

  PetscErrorCode (*Destroy)(Mat);    /* chained type-specific destroy */
  PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**); /* converts PETSc storage to MUMPS triples */
} Mat_MUMPS;
104 
105 extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
106 
107 #undef __FUNCT__
108 #define __FUNCT__ "MatMumpsResetSchur_Private"
109 static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
115   ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
116   ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
117   ierr = PetscFree(mumps->schur_pivots);CHKERRQ(ierr);
118   ierr = PetscFree(mumps->schur_work);CHKERRQ(ierr);
119   if (!mumps->schur_restored) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
120   mumps->id.size_schur = 0;
121   mumps->id.ICNTL(19) = 0;
122   PetscFunctionReturn(0);
123 }
124 
125 #undef __FUNCT__
126 #define __FUNCT__ "MatMumpsFactorSchur_Private"
/* Factor the dense Schur complement returned by MUMPS with the appropriate
   LAPACK routine: LU (GETRF) for unsymmetric, Bunch-Kaufman (SYTRF) for
   general symmetric (id.sym == 2), Cholesky (POTRF) otherwise.
   Pivots and workspace are cached on the Mat_MUMPS context; the routine is a
   no-op if the factorization was already computed. */
static PetscErrorCode MatMumpsFactorSchur_Private(Mat_MUMPS* mumps)
{
  PetscBLASInt   B_N,B_ierr,B_slda;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mumps->schur_factored) {
    PetscFunctionReturn(0);
  }
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr); /* leading dimension of id.schur */
  if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
    if (!mumps->schur_pivots) {
      ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
    }
    /* FP traps are disabled around LAPACK calls, matching PETSc convention */
    ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
    PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&B_ierr));
    ierr = PetscFPTrapPop();CHKERRQ(ierr);
    if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
  } else { /* either full or lower-triangular (not packed) */
    char ord[2];
    if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
      sprintf(ord,"L");
    } else { /* ICNTL(19) == 1 lower triangular stored by rows */
      sprintf(ord,"U");
    }
    if (mumps->id.sym == 2) { /* general symmetric: Bunch-Kaufman factorization */
      if (!mumps->schur_pivots) {
        PetscScalar  lwork;

        ierr = PetscMalloc1(B_N,&mumps->schur_pivots);CHKERRQ(ierr);
        /* workspace query: lwork = -1 makes SYTRF return the optimal size in lwork */
        mumps->schur_B_lwork=-1;
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to SYTRF Lapack routine %d",(int)B_ierr);
        ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
        ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRF Lapack routine %d",(int)B_ierr);
    } else { /* id.sym == 1: Cholesky */
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
    }
  }
  mumps->schur_factored = PETSC_TRUE;
  PetscFunctionReturn(0);
}
180 
181 #undef __FUNCT__
182 #define __FUNCT__ "MatMumpsInvertSchur_Private"
183 static PetscErrorCode MatMumpsInvertSchur_Private(Mat_MUMPS* mumps)
184 {
185   PetscBLASInt   B_N,B_ierr,B_slda;
186   PetscErrorCode ierr;
187 
188   PetscFunctionBegin;
189   ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr);
190   ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
191   ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
192   if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
193     if (!mumps->schur_work) {
194       PetscScalar lwork;
195 
196       mumps->schur_B_lwork = -1;
197       ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
198       PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
199       ierr = PetscFPTrapPop();CHKERRQ(ierr);
200       if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to GETRI Lapack routine %d",(int)B_ierr);
201       ierr = PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);CHKERRQ(ierr);
202       ierr = PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);CHKERRQ(ierr);
203     }
204     ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
205     PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
206     ierr = PetscFPTrapPop();CHKERRQ(ierr);
207     if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
208   } else { /* either full or lower-triangular (not packed) */
209     char ord[2];
210     if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
211       sprintf(ord,"L");
212     } else { /* ICNTL(19) == 1 lower triangular stored by rows */
213       sprintf(ord,"U");
214     }
215     if (mumps->id.sym == 2) {
216       ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
217       PetscStackCallBLAS("LAPACKsytri",LAPACKsytri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&B_ierr));
218       ierr = PetscFPTrapPop();CHKERRQ(ierr);
219       if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRI Lapack routine %d",(int)B_ierr);
220     } else {
221       ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
222       PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,&B_ierr));
223       ierr = PetscFPTrapPop();CHKERRQ(ierr);
224       if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
225     }
226   }
227   mumps->schur_inverted = PETSC_TRUE;
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatMumpsSolveSchur_Private"
/* Solve with the Schur complement on the reduced right-hand sides stored in
   mumps->id.redrhs. If the Schur inverse is available, the solve is a dense
   multiply (GEMM/SYMM); otherwise the triangular solves (GETRS/SYTRS/POTRS)
   are used, factoring first if needed.
   sol_in_redrhs: PETSC_TRUE places the solution back into id.redrhs (needed
   by the MUMPS expansion phase); PETSC_FALSE leaves id.redrhs untouched and
   the solution ends up in mumps->schur_sol. */
static PetscErrorCode MatMumpsSolveSchur_Private(Mat_MUMPS* mumps, PetscBool sol_in_redrhs)
{
  PetscBLASInt   B_N,B_Nrhs,B_ierr,B_slda,B_rlda;
  PetscScalar    one=1.,zero=0.;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMumpsFactorSchur_Private(mumps);CHKERRQ(ierr); /* no-op if already factored */
  ierr = PetscBLASIntCast(mumps->id.size_schur,&B_N);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.schur_lld,&B_slda);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.nrhs,&B_Nrhs);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(mumps->id.lredrhs,&B_rlda);CHKERRQ(ierr); /* leading dimension of redrhs */
  if (mumps->schur_inverted) {
    /* inverse available: solution = inv(S) * redrhs via a dense multiply */
    PetscInt sizesol = B_Nrhs*B_N;
    if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
      ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
      ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
      mumps->schur_sizesol = sizesol;
    }
    if (!mumps->sym) {
      char type[2];
      /* choose N/T so the multiply accounts both for the storage orientation
         (ICNTL(19)) and for a transposed solve request (ICNTL(9)) */
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      PetscStackCallBLAS("BLASgemm",BLASgemm_(type,"N",&B_N,&B_Nrhs,&B_N,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    } else {
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      PetscStackCallBLAS("BLASsymm",BLASsymm_("L",ord,&B_N,&B_Nrhs,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
    }
    if (sol_in_redrhs) {
      /* copy the product back so MUMPS sees the Schur solution in redrhs */
      ierr = PetscMemcpy(mumps->id.redrhs,mumps->schur_sol,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* Schur complement has not been inverted */
    MumpsScalar *orhs=NULL;

    if (!sol_in_redrhs) {
      /* the LAPACK *TRS routines solve in place: temporarily point id.redrhs
         at a scratch copy so the caller's reduced rhs is preserved */
      PetscInt sizesol = B_Nrhs*B_N;
      if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
        ierr = PetscFree(mumps->schur_sol);CHKERRQ(ierr);
        ierr = PetscMalloc1(sizesol,&mumps->schur_sol);CHKERRQ(ierr);
        mumps->schur_sizesol = sizesol;
      }
      orhs = mumps->id.redrhs;
      ierr = PetscMemcpy(mumps->schur_sol,mumps->id.redrhs,sizesol*sizeof(PetscScalar));CHKERRQ(ierr);
      mumps->id.redrhs = (MumpsScalar*)mumps->schur_sol;
    }
    if (!mumps->sym) { /* MUMPS always return a full Schur matrix */
      char type[2];
      if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"N");
        } else {
          sprintf(type,"T");
        }
      } else { /* stored by columns */
        if (!mumps->id.ICNTL(9)) { /* transpose solve */
          sprintf(type,"T");
        } else {
          sprintf(type,"N");
        }
      }
      ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
      PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_(type,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
      ierr = PetscFPTrapPop();CHKERRQ(ierr);
      if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRS Lapack routine %d",(int)B_ierr);
    } else { /* either full or lower-triangular (not packed) */
      char ord[2];
      if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
        sprintf(ord,"L");
      } else { /* ICNTL(19) == 1 lower triangular stored by rows */
        sprintf(ord,"U");
      }
      if (mumps->id.sym == 2) {
        /* SYTRS matches the SYTRF factorization used for general symmetric */
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKsytrs",LAPACKsytrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRS Lapack routine %d",(int)B_ierr);
      } else {
        /* POTRS matches the POTRF (Cholesky) factorization */
        ierr = PetscFPTrapPush(PETSC_FP_TRAP_OFF);CHKERRQ(ierr);
        PetscStackCallBLAS("LAPACKpotrs",LAPACKpotrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
        ierr = PetscFPTrapPop();CHKERRQ(ierr);
        if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRS Lapack routine %d",(int)B_ierr);
      }
    }
    if (!sol_in_redrhs) {
      mumps->id.redrhs = orhs; /* restore the caller's redrhs pointer */
    }
  }
  PetscFunctionReturn(0);
}
338 
339 #undef __FUNCT__
340 #define __FUNCT__ "MatMumpsHandleSchur_Private"
/* Driver called around the MUMPS solve phase when a Schur complement is
   active (ICNTL(19) != 0). First call: prepares the condensation step
   (ICNTL(26)=1) and the redrhs buffer. Second call: solves the Schur system
   ourselves, runs the MUMPS expansion phase (ICNTL(26)=2), then restores the
   defaults. */
static PetscErrorCode MatMumpsHandleSchur_Private(Mat_MUMPS* mumps)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */
    PetscFunctionReturn(0);
  }
  if (!mumps->schur_second_solve) { /* prepare for the condensation step */
    /* check if schur complement has been computed
       We set by default ICNTL(26) == -1 when Schur indices have been provided by the user.
       According to the MUMPS (5.0.0) manual, any value should be harmless during the factorization phase.
       Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system.
       This requires an extra call to PetscMUMPS_c and the computation of the factors for S, handled by setting schur_second_solve to PETSC_TRUE */
    if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
      PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
      /* allocate MUMPS internal array to store reduced right-hand sides */
      if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
        ierr = PetscFree(mumps->id.redrhs);CHKERRQ(ierr);
        mumps->id.lredrhs = mumps->id.size_schur;
        ierr = PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);CHKERRQ(ierr);
        mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
      }
      mumps->schur_second_solve = PETSC_TRUE;
      mumps->id.ICNTL(26) = 1; /* condensation phase */
    }
  } else { /* prepare for the expansion step */
    /* solve Schur complement (this should be done by the MUMPS user, so basically us) */
    ierr = MatMumpsSolveSchur_Private(mumps,PETSC_TRUE);CHKERRQ(ierr);
    mumps->id.ICNTL(26) = 2; /* expansion phase */
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
    /* restore defaults */
    mumps->id.ICNTL(26) = -1;
    mumps->schur_second_solve = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
379 
380 /*
381   MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz]
382 
383   input:
384     A       - matrix in aij,baij or sbaij (bs=1) format
385     shift   - 0: C style output triple; 1: Fortran style output triple.
386     reuse   - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
387               MAT_REUSE_MATRIX:   only the values in v array are updated
388   output:
389     nnz     - dim of r, c, and v (number of local nonzero entries of A)
390     r, c, v - row and col index, matrix values (matrix triples)
391 
392   The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() it is
freed with PetscFree(mumps->irn);  This is not ideal code, the fact that v is ONLY sometimes part of mumps->irn means
394   that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
395 
396  */
397 
398 #undef __FUNCT__
399 #define __FUNCT__ "MatConvertToTriples_seqaij_seqaij"
400 PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
401 {
402   const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
403   PetscInt       nz,rnz,i,j;
404   PetscErrorCode ierr;
405   PetscInt       *row,*col;
406   Mat_SeqAIJ     *aa=(Mat_SeqAIJ*)A->data;
407 
408   PetscFunctionBegin;
409   *v=aa->a;
410   if (reuse == MAT_INITIAL_MATRIX) {
411     nz   = aa->nz;
412     ai   = aa->i;
413     aj   = aa->j;
414     *nnz = nz;
415     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
416     col  = row + nz;
417 
418     nz = 0;
419     for (i=0; i<M; i++) {
420       rnz = ai[i+1] - ai[i];
421       ajj = aj + ai[i];
422       for (j=0; j<rnz; j++) {
423         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
424       }
425     }
426     *r = row; *c = col;
427   }
428   PetscFunctionReturn(0);
429 }
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "MatConvertToTriples_seqbaij_seqaij"
433 PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
434 {
435   Mat_SeqBAIJ    *aa=(Mat_SeqBAIJ*)A->data;
436   const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
437   PetscInt       bs,M,nz,idx=0,rnz,i,j,k,m;
438   PetscErrorCode ierr;
439   PetscInt       *row,*col;
440 
441   PetscFunctionBegin;
442   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
443   M = A->rmap->N/bs;
444   *v = aa->a;
445   if (reuse == MAT_INITIAL_MATRIX) {
446     ai   = aa->i; aj = aa->j;
447     nz   = bs2*aa->nz;
448     *nnz = nz;
449     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
450     col  = row + nz;
451 
452     for (i=0; i<M; i++) {
453       ajj = aj + ai[i];
454       rnz = ai[i+1] - ai[i];
455       for (k=0; k<rnz; k++) {
456         for (j=0; j<bs; j++) {
457           for (m=0; m<bs; m++) {
458             row[idx]   = i*bs + m + shift;
459             col[idx++] = bs*(ajj[k]) + j + shift;
460           }
461         }
462       }
463     }
464     *r = row; *c = col;
465   }
466   PetscFunctionReturn(0);
467 }
468 
469 #undef __FUNCT__
470 #define __FUNCT__ "MatConvertToTriples_seqsbaij_seqsbaij"
471 PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
472 {
473   const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
474   PetscInt       nz,rnz,i,j;
475   PetscErrorCode ierr;
476   PetscInt       *row,*col;
477   Mat_SeqSBAIJ   *aa=(Mat_SeqSBAIJ*)A->data;
478 
479   PetscFunctionBegin;
480   *v = aa->a;
481   if (reuse == MAT_INITIAL_MATRIX) {
482     nz   = aa->nz;
483     ai   = aa->i;
484     aj   = aa->j;
485     *v   = aa->a;
486     *nnz = nz;
487     ierr = PetscMalloc1(2*nz, &row);CHKERRQ(ierr);
488     col  = row + nz;
489 
490     nz = 0;
491     for (i=0; i<M; i++) {
492       rnz = ai[i+1] - ai[i];
493       ajj = aj + ai[i];
494       for (j=0; j<rnz; j++) {
495         row[nz] = i+shift; col[nz++] = ajj[j] + shift;
496       }
497     }
498     *r = row; *c = col;
499   }
500   PetscFunctionReturn(0);
501 }
502 
503 #undef __FUNCT__
504 #define __FUNCT__ "MatConvertToTriples_seqaij_seqsbaij"
/* SeqAIJ -> upper-triangular triples for a symmetric (SBAIJ-style) factor.
   Only entries from the diagonal onward in each row are emitted; values ARE
   copied here (unlike the AIJ->AIJ converter) since only part of each row of
   aa->a is used, so the reuse branch must refresh them. */
PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai,*aj,*ajj,*adiag,M=A->rmap->n;
  PetscInt          nz,rnz,i,j;
  const PetscScalar *av,*v1;
  PetscScalar       *val;
  PetscErrorCode    ierr;
  PetscInt          *row,*col;
  Mat_SeqAIJ        *aa=(Mat_SeqAIJ*)A->data;

  PetscFunctionBegin;
  ai   =aa->i; aj=aa->j;av=aa->a;
  adiag=aa->diag; /* adiag[i] = offset of row i's diagonal entry in aj/av */
  if (reuse == MAT_INITIAL_MATRIX) {
    /* count nz in the upper triangular part of A */
    nz = 0;
    for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
    *nnz = nz;

    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    nz = 0;
    for (i=0; i<M; i++) {
      rnz = ai[i+1] - adiag[i]; /* entries from the diagonal to end of row */
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
      }
    }
    *r = row; *c = col; *v = val;
  } else {
    /* pattern unchanged: refresh only the values, same traversal order */
    nz = 0; val = *v;
    for (i=0; i <M; i++) {
      rnz = ai[i+1] - adiag[i];
      ajj = aj + adiag[i];
      v1  = av + adiag[i];
      for (j=0; j<rnz; j++) {
        val[nz++] = v1[j];
      }
    }
  }
  PetscFunctionReturn(0);
}
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "MatConvertToTriples_mpisbaij_mpisbaij"
/* MPISBAIJ (bs=1) -> MUMPS triples: concatenates the diagonal (A) and
   off-diagonal (B) parts of each local row; the SBAIJ A-part already stores
   only the upper triangle. Row/column indices are global. */
PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ      *aa  = (Mat_SeqSBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb  = (Mat_SeqBAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
  av=aa->a; bv=bb->a;

  garray = mat->garray; /* maps B-part compressed column indices to global columns */

  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = aa->nz + bb->nz;
    *nnz = nz;
    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v; /* pattern unchanged: refresh values only */
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v1     = av + ai[i];
    v2     = bv + bi[i];

    /* A-part: local columns are offset by rstart to become global */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: compressed columns mapped to global via garray */
    for (j=0; j < countB; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
      }
      val[jj++] = v2[j];
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpiaij"
615 PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
616 {
617   const PetscInt    *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
618   PetscErrorCode    ierr;
619   PetscInt          rstart,nz,i,j,jj,irow,countA,countB;
620   PetscInt          *row,*col;
621   const PetscScalar *av, *bv,*v1,*v2;
622   PetscScalar       *val;
623   Mat_MPIAIJ        *mat = (Mat_MPIAIJ*)A->data;
624   Mat_SeqAIJ        *aa  = (Mat_SeqAIJ*)(mat->A)->data;
625   Mat_SeqAIJ        *bb  = (Mat_SeqAIJ*)(mat->B)->data;
626 
627   PetscFunctionBegin;
628   ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
629   av=aa->a; bv=bb->a;
630 
631   garray = mat->garray;
632 
633   if (reuse == MAT_INITIAL_MATRIX) {
634     nz   = aa->nz + bb->nz;
635     *nnz = nz;
636     ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
637     col  = row + nz;
638     val  = (PetscScalar*)(col + nz);
639 
640     *r = row; *c = col; *v = val;
641   } else {
642     row = *r; col = *c; val = *v;
643   }
644 
645   jj = 0; irow = rstart;
646   for (i=0; i<m; i++) {
647     ajj    = aj + ai[i];                 /* ptr to the beginning of this row */
648     countA = ai[i+1] - ai[i];
649     countB = bi[i+1] - bi[i];
650     bjj    = bj + bi[i];
651     v1     = av + ai[i];
652     v2     = bv + bi[i];
653 
654     /* A-part */
655     for (j=0; j<countA; j++) {
656       if (reuse == MAT_INITIAL_MATRIX) {
657         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
658       }
659       val[jj++] = v1[j];
660     }
661 
662     /* B-part */
663     for (j=0; j < countB; j++) {
664       if (reuse == MAT_INITIAL_MATRIX) {
665         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
666       }
667       val[jj++] = v2[j];
668     }
669     irow++;
670   }
671   PetscFunctionReturn(0);
672 }
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "MatConvertToTriples_mpibaij_mpiaij"
/* MPIBAIJ -> MUMPS (AIJ-style) triples: every bs x bs block of the diagonal
   (A) and off-diagonal (B) parts is expanded into scalar entries with global
   row/column indices. */
PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  Mat_MPIBAIJ       *mat    = (Mat_MPIBAIJ*)A->data;
  Mat_SeqBAIJ       *aa     = (Mat_SeqBAIJ*)(mat->A)->data;
  Mat_SeqBAIJ       *bb     = (Mat_SeqBAIJ*)(mat->B)->data;
  const PetscInt    *ai     = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
  const PetscInt    *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
  const PetscInt    bs2=mat->bs2;
  PetscErrorCode    ierr;
  PetscInt          bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
  PetscInt          *row,*col;
  const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
  PetscScalar       *val;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    nz   = bs2*(aa->nz + bb->nz); /* scalar nonzeros = blocks * bs^2 */
    *nnz = nz;
    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v; /* pattern unchanged: refresh values only */
  }

  jj = 0; irow = rstart;
  for (i=0; i<mbs; i++) {
    countA = ai[i+1] - ai[i];
    countB = bi[i+1] - bi[i];
    ajj    = aj + ai[i];
    bjj    = bj + bi[i];
    v1     = av + bs2*ai[i];  /* values of block row i in the A-part */
    v2     = bv + bs2*bi[i];  /* values of block row i in the B-part */

    idx = 0;
    /* A-part: values within a block are consumed with j (column) outer and
       n (row) inner, matching the stored ordering of v1 */
    for (k=0; k<countA; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = rstart + bs*ajj[k] + j + shift;
          }
          val[jj++] = v1[idx++];
        }
      }
    }

    idx = 0;
    /* B-part: block column indices mapped to global through garray */
    for (k=0; k<countB; k++) {
      for (j=0; j<bs; j++) {
        for (n=0; n<bs; n++) {
          if (reuse == MAT_INITIAL_MATRIX) {
            row[jj] = irow + n + shift;
            col[jj] = bs*garray[bjj[k]] + j + shift;
          }
          val[jj++] = v2[idx++];
        }
      }
    }
    irow += bs;
  }
  PetscFunctionReturn(0);
}
744 
745 #undef __FUNCT__
746 #define __FUNCT__ "MatConvertToTriples_mpiaij_mpisbaij"
/* MPIAIJ -> upper-triangular triples for a symmetric (SBAIJ-style) factor.
   A-part: entries from the diagonal onward (via aa->diag).
   B-part: keeps only columns with global index > rstart; since garray values
   never fall inside the local ownership range [rstart, rend), this selects
   exactly the columns to the right of the local diagonal block. */
PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
{
  const PetscInt    *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
  PetscErrorCode    ierr;
  PetscInt          rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
  PetscInt          *row,*col;
  const PetscScalar *av, *bv,*v1,*v2;
  PetscScalar       *val;
  Mat_MPIAIJ        *mat =  (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ        *aa  =(Mat_SeqAIJ*)(mat->A)->data;
  Mat_SeqAIJ        *bb  =(Mat_SeqAIJ*)(mat->B)->data;

  PetscFunctionBegin;
  ai=aa->i; aj=aa->j; adiag=aa->diag;
  bi=bb->i; bj=bb->j; garray = mat->garray;
  av=aa->a; bv=bb->a;

  rstart = A->rmap->rstart;

  if (reuse == MAT_INITIAL_MATRIX) {
    nza = 0;    /* num of upper triangular entries in mat->A, including diagonals */
    nzb = 0;    /* num of upper triangular entries in mat->B */
    for (i=0; i<m; i++) {
      nza   += (ai[i+1] - adiag[i]);
      countB = bi[i+1] - bi[i];
      bjj    = bj + bi[i];
      for (j=0; j<countB; j++) {
        /* same filter as the fill loop below, so the counts match exactly */
        if (garray[bjj[j]] > rstart) nzb++;
      }
    }

    nz   = nza + nzb; /* total nz of upper triangular part of mat */
    *nnz = nz;
    /* single allocation: row indices, column indices, then values */
    ierr = PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);CHKERRQ(ierr);
    col  = row + nz;
    val  = (PetscScalar*)(col + nz);

    *r = row; *c = col; *v = val;
  } else {
    row = *r; col = *c; val = *v; /* pattern unchanged: refresh values only */
  }

  jj = 0; irow = rstart;
  for (i=0; i<m; i++) {
    ajj    = aj + adiag[i];                 /* ptr to the beginning of the diagonal of this row */
    v1     = av + adiag[i];
    countA = ai[i+1] - adiag[i];
    countB = bi[i+1] - bi[i];
    bjj    = bj + bi[i];
    v2     = bv + bi[i];

    /* A-part: diagonal and above, local columns offset by rstart */
    for (j=0; j<countA; j++) {
      if (reuse == MAT_INITIAL_MATRIX) {
        row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
      }
      val[jj++] = v1[j];
    }

    /* B-part: only columns beyond the local diagonal block */
    for (j=0; j < countB; j++) {
      if (garray[bjj[j]] > rstart) {
        if (reuse == MAT_INITIAL_MATRIX) {
          row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
        }
        val[jj++] = v2[j];
      }
    }
    irow++;
  }
  PetscFunctionReturn(0);
}
819 
820 #undef __FUNCT__
821 #define __FUNCT__ "MatGetDiagonal_MUMPS"
/* MUMPS factor matrices do not expose their entries, so extracting the
   diagonal is unsupported: always raise PETSC_ERR_SUP. */
PetscErrorCode MatGetDiagonal_MUMPS(Mat A,Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: MUMPS factor");
  PetscFunctionReturn(0); /* unreachable; keeps the compiler satisfied */
}
828 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
   Destroys a MUMPS factor matrix: frees the PETSc-side work objects
   (scatters, sequential rhs/solution vectors, triple arrays, Schur data),
   terminates the MUMPS instance with JOB_END, releases the duplicated
   communicator, chains to the underlying type's destroy routine, and clears
   all composed query/option functions registered on the matrix.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&mumps->scat_rhs);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
  ierr = VecDestroy(&mumps->b_seq);CHKERRQ(ierr);
  ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
  ierr = PetscFree(mumps->id.perm_in);CHKERRQ(ierr);
  /* irn/jcn/val appear to share one allocation headed by irn (see the triple
     conversion routines) -- freeing irn releases all three; confirm for every
     ConvertToTriples variant */
  ierr = PetscFree(mumps->irn);CHKERRQ(ierr);
  ierr = PetscFree(mumps->info);CHKERRQ(ierr);
  ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
  /* terminate the MUMPS instance; NOTE(review): INFOG(1) is not checked after
     JOB_END, so shutdown-time errors are silently ignored */
  mumps->id.job = JOB_END;
  PetscMUMPS_c(&mumps->id);
  ierr = MPI_Comm_free(&mumps->comm_mumps);CHKERRQ(ierr);
  if (mumps->Destroy) {
    ierr = (mumps->Destroy)(A);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetSchurIndices_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsInvertSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsCreateSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsRestoreSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSolveSchurComplement_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatMumpsSolveSchurComplementTranspose_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
875 
876 #undef __FUNCT__
877 #define __FUNCT__ "MatSolve_MUMPS"
878 PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
879 {
880   Mat_MUMPS        *mumps=(Mat_MUMPS*)A->spptr;
881   PetscScalar      *array;
882   Vec              b_seq;
883   IS               is_iden,is_petsc;
884   PetscErrorCode   ierr;
885   PetscInt         i;
886   static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
887 
888   PetscFunctionBegin;
889   ierr = PetscCitationsRegister("@article{MUMPS01,\n  author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n  title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n  journal = {SIAM Journal on Matrix Analysis and Applications},\n  volume = {23},\n  number = {1},\n  pages = {15--41},\n  year = {2001}\n}\n",&cite1);CHKERRQ(ierr);
890   ierr = PetscCitationsRegister("@article{MUMPS02,\n  author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n  title = {Hybrid scheduling for the parallel solution of linear systems},\n  journal = {Parallel Computing},\n  volume = {32},\n  number = {2},\n  pages = {136--156},\n  year = {2006}\n}\n",&cite2);CHKERRQ(ierr);
891   mumps->id.nrhs = 1;
892   b_seq          = mumps->b_seq;
893   if (mumps->size > 1) {
894     /* MUMPS only supports centralized rhs. Scatter b into a seqential rhs vector */
895     ierr = VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
896     ierr = VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
897     if (!mumps->myid) {ierr = VecGetArray(b_seq,&array);CHKERRQ(ierr);}
898   } else {  /* size == 1 */
899     ierr = VecCopy(b,x);CHKERRQ(ierr);
900     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
901   }
902   if (!mumps->myid) { /* define rhs on the host */
903     mumps->id.nrhs = 1;
904     mumps->id.rhs = (MumpsScalar*)array;
905   }
906 
907   /* handle condensation step of Schur complement (if any) */
908   ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
909 
910   /* solve phase */
911   /*-------------*/
912   mumps->id.job = JOB_SOLVE;
913   PetscMUMPS_c(&mumps->id);
914   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
915 
916   /* handle expansion step of Schur complement (if any) */
917   ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
918 
919   if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
920     if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
921       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (not its size, lsol_loc), recreates scat_sol */
922       ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
923     }
924     if (!mumps->scat_sol) { /* create scatter scat_sol */
925       ierr = ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
926       for (i=0; i<mumps->id.lsol_loc; i++) {
927         mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
928       }
929       ierr = ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc);CHKERRQ(ierr);  /* to */
930       ierr = VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);CHKERRQ(ierr);
931       ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
932       ierr = ISDestroy(&is_petsc);CHKERRQ(ierr);
933 
934       mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
935     }
936 
937     ierr = VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
938     ierr = VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
939   }
940   PetscFunctionReturn(0);
941 }
942 
#undef __FUNCT__
#define __FUNCT__ "MatSolveTranspose_MUMPS"
/*
   Solves A^T x = b by toggling MUMPS ICNTL(9): setting it to 0 makes the
   following solve use the transposed system; it is set back to 1 afterwards
   (solve with A) so subsequent MatSolve() calls behave normally.
*/
PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
{
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  mumps->id.ICNTL(9) = 0; /* 0: solve the transposed system */
  ierr = MatSolve_MUMPS(A,b,x);CHKERRQ(ierr);
  mumps->id.ICNTL(9) = 1; /* restore: solve with A */
  PetscFunctionReturn(0);
}
956 
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_MUMPS"
/*
   Solves A X = B for multiple right-hand sides, where B and X are dense
   matrices (MATSEQDENSE or MATMPIDENSE) with the same row distribution.

   Sequential case: B is copied into X and MUMPS solves in place on X's array.
   Parallel case: MUMPS requires a centralized rhs on the host, so B's entries
   are gathered column-major into a host vector b_seq; the distributed
   solution (sol_loc/isol_loc) is then scattered back into X's array.
*/
PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  PetscInt       i,nrhs,M;
  PetscScalar    *array,*bray;

  PetscFunctionBegin;
  /* both B and X must be dense and conforming */
  ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");

  ierr = MatGetSize(B,&M,&nrhs);CHKERRQ(ierr);
  mumps->id.nrhs = nrhs;
  mumps->id.lrhs = M; /* leading dimension of the column-major rhs array */

  if (mumps->size == 1) {
    /* copy B to X */
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);
    mumps->id.rhs = (MumpsScalar*)array; /* MUMPS overwrites the rhs with the solution */
    /* handle condensation step of Schur complement (if any) */
    ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* handle expansion step of Schur complement (if any) */
    ierr = MatMumpsHandleSchur_Private(mumps);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);
  } else {  /*--------- parallel case --------*/
    PetscInt       lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
    MumpsScalar    *sol_loc,*sol_loc_save;
    IS             is_to,is_from;
    PetscInt       k,proc,j,m;
    const PetscInt *rstart;
    Vec            v_mpi,b_seq,x_seq;
    VecScatter     scat_rhs,scat_sol;

    /* create x_seq to hold local solution */
    isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
    sol_loc_save  = mumps->id.sol_loc;

    lsol_loc  = mumps->id.INFO(23); /* number of solution entries MUMPS keeps on this process */
    nlsol_loc = nrhs*lsol_loc;     /* length of sol_loc */
    ierr = PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);CHKERRQ(ierr);
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    mumps->id.isol_loc = isol_loc;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);CHKERRQ(ierr);

    /* copy rhs matrix B into vector v_mpi */
    ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr);
    ierr = MatDenseGetArray(B,&bray);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(B,&bray);CHKERRQ(ierr);

    /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
    /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
      iidx: inverse of idx, will be used by scattering xx_seq -> X       */
    ierr = PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(B,&rstart);CHKERRQ(ierr);
    k = 0;
    for (proc=0; proc<mumps->size; proc++){
      for (j=0; j<nrhs; j++){
        for (i=rstart[proc]; i<rstart[proc+1]; i++){
          iidx[j*M + i] = k;
          idx[k++]      = j*M + i;
        }
      }
    }

    /* only the host holds the full centralized rhs; other ranks get empty scatters */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);CHKERRQ(ierr);
    }
    ierr = VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    if (!mumps->myid) { /* define rhs on the host */
      /* NOTE(review): the array is restored immediately but the raw pointer is
         still read by MUMPS during the solve below -- relies on the array
         staying valid after VecRestoreArray(); confirm this is intended */
      ierr = VecGetArray(b_seq,&bray);CHKERRQ(ierr);
      mumps->id.rhs = (MumpsScalar*)bray;
      ierr = VecRestoreArray(b_seq,&bray);CHKERRQ(ierr);
    }

    /* solve phase */
    /*-------------*/
    mumps->id.job = JOB_SOLVE;
    PetscMUMPS_c(&mumps->id);
    if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

    /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
    ierr = MatDenseGetArray(X,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(v_mpi,array);CHKERRQ(ierr);

    /* create scatter scat_sol */
    ierr = PetscMalloc1(nlsol_loc,&idxx);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);CHKERRQ(ierr);
    for (i=0; i<lsol_loc; i++) {
      isol_loc[i] -= 1; /* change Fortran style to C style */
      idxx[i] = iidx[isol_loc[i]];
      for (j=1; j<nrhs; j++){
        idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M];
      }
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);CHKERRQ(ierr);
    ierr = VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = ISDestroy(&is_from);CHKERRQ(ierr);
    ierr = ISDestroy(&is_to);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(X,&array);CHKERRQ(ierr);

    /* free spaces */
    mumps->id.sol_loc = sol_loc_save;
    mumps->id.isol_loc = isol_loc_save;

    ierr = PetscFree2(sol_loc,isol_loc);CHKERRQ(ierr);
    ierr = PetscFree2(idx,iidx);CHKERRQ(ierr);
    ierr = PetscFree(idxx);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
    ierr = VecDestroy(&v_mpi);CHKERRQ(ierr);
    ierr = VecDestroy(&b_seq);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_rhs);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_sol);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1103 
1104 #if !defined(PETSC_USE_COMPLEX)
1105 /*
1106   input:
1107    F:        numeric factor
1108   output:
1109    nneg:     total number of negative pivots
1110    nzero:    0
1111    npos:     (global dimension of F) - nneg
1112 */
1113 
1114 #undef __FUNCT__
1115 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
1116 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
1117 {
1118   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1119   PetscErrorCode ierr;
1120   PetscMPIInt    size;
1121 
1122   PetscFunctionBegin;
1123   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);CHKERRQ(ierr);
1124   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
1125   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",mumps->id.INFOG(13));
1126 
1127   if (nneg) *nneg = mumps->id.INFOG(12);
1128   if (nzero || npos) {
1129     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set as 1 for null pivot row detection");
1130     if (nzero) *nzero = mumps->id.INFOG(28);
1131     if (npos) *npos   = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
1132   }
1133   PetscFunctionReturn(0);
1134 }
1135 #endif /* !defined(PETSC_USE_COMPLEX) */
1136 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
   Numeric factorization phase: re-extracts the matrix values into the triple
   arrays (structure reused from the symbolic phase) and runs JOB_FACTNUMERIC.
   With more than one process it also (re)allocates the distributed-solution
   buffers sol_loc/isol_loc and the work vector x_seq used by MatSolve_MUMPS().
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)(F)->spptr;
  PetscErrorCode ierr;
  Mat            F_diag;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  /* MAT_REUSE_MATRIX: only the values are refreshed; irn/jcn stay as built by the symbolic phase */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* numerical factorization phase */
  /*-------------------------------*/
  mumps->id.job = JOB_FACTNUMERIC;
  if (!mumps->id.ICNTL(18)) { /* A is centralized */
    if (!mumps->myid) {
      mumps->id.a = (MumpsScalar*)mumps->val;
    }
  } else {
    mumps->id.a_loc = (MumpsScalar*)mumps->val;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) {
    /* INFO(1)==-13: workspace allocation failure; INFO(2) carries the requested size
       (reported in megabytes when negative, bytes otherwise -- per the messages below) */
    if (mumps->id.INFO(1) == -13) {
      if (mumps->id.INFO(2) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",-mumps->id.INFO(2));
      else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d bytes\n",mumps->id.INFO(2));
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",mumps->id.INFO(1),mumps->id.INFO(2));
  }
  /* NOTE(review): the condition tests ICNTL(16) but the message prints INFOG(16) --
     confirm which field is actually intended here */
  if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"  mumps->id.ICNTL(16):=%d\n",mumps->id.INFOG(16));

  (F)->assembled        = PETSC_TRUE;
  mumps->matstruc       = SAME_NONZERO_PATTERN;
  mumps->schur_factored = PETSC_FALSE;
  mumps->schur_inverted = PETSC_FALSE;

  /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */
  if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3;

  if (mumps->size > 1) {
    PetscInt    lsol_loc;
    PetscScalar *sol_loc;

    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
    if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;

    /* distributed solution; Create x_seq=sol_loc for repeated use */
    if (mumps->x_seq) {
      /* free buffers from a previous numeric factorization; lsol_loc may have changed */
      ierr = VecScatterDestroy(&mumps->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(&mumps->x_seq);CHKERRQ(ierr);
    }
    lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
    ierr = PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);CHKERRQ(ierr);
    mumps->id.lsol_loc = lsol_loc;
    mumps->id.sol_loc = (MumpsScalar*)sol_loc;
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1199 
1200 /* Sets MUMPS options from the options database */
1201 #undef __FUNCT__
1202 #define __FUNCT__ "PetscSetMUMPSFromOptions"
1203 PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
1204 {
1205   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
1206   PetscErrorCode ierr;
1207   PetscInt       icntl,info[40],i,ninfo=40;
1208   PetscBool      flg;
1209 
1210   PetscFunctionBegin;
1211   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
1212   ierr = PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);CHKERRQ(ierr);
1213   if (flg) mumps->id.ICNTL(1) = icntl;
1214   ierr = PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);CHKERRQ(ierr);
1215   if (flg) mumps->id.ICNTL(2) = icntl;
1216   ierr = PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);CHKERRQ(ierr);
1217   if (flg) mumps->id.ICNTL(3) = icntl;
1218 
1219   ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
1220   if (flg) mumps->id.ICNTL(4) = icntl;
1221   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* resume MUMPS default id.ICNTL(3) = 6 */
1222 
1223   ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);CHKERRQ(ierr);
1224   if (flg) mumps->id.ICNTL(6) = icntl;
1225 
1226   ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
1227   if (flg) {
1228     if (icntl== 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
1229     else mumps->id.ICNTL(7) = icntl;
1230   }
1231 
1232   ierr = PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);CHKERRQ(ierr);
1233   /* ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL);CHKERRQ(ierr); handled by MatSolveTranspose_MUMPS() */
1234   ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);CHKERRQ(ierr);
1235   ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);CHKERRQ(ierr);
1236   ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);CHKERRQ(ierr);
1237   ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);CHKERRQ(ierr);
1238   ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);CHKERRQ(ierr);
1239   ierr = PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);CHKERRQ(ierr);
1240   if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any schur data (if any) */
1241     ierr = MatMumpsResetSchur_Private(mumps);CHKERRQ(ierr);
1242   }
1243   /* ierr = PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL);CHKERRQ(ierr); -- sparse rhs is not supported in PETSc API */
1244   /* ierr = PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL);CHKERRQ(ierr); we only use distributed solution vector */
1245 
1246   ierr = PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);CHKERRQ(ierr);
1247   ierr = PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);CHKERRQ(ierr);
1248   ierr = PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);CHKERRQ(ierr);
1249   if (mumps->id.ICNTL(24)) {
1250     mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
1251   }
1252 
1253   ierr = PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);CHKERRQ(ierr);
1254   ierr = PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);CHKERRQ(ierr);
1255   ierr = PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);CHKERRQ(ierr);
1256   ierr = PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);CHKERRQ(ierr);
1257   ierr = PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);CHKERRQ(ierr);
1258   ierr = PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);CHKERRQ(ierr);
1259   ierr = PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);CHKERRQ(ierr);
1260   /* ierr = PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);CHKERRQ(ierr);  -- not supported by PETSc API */
1261   ierr = PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);CHKERRQ(ierr);
1262 
1263   ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);CHKERRQ(ierr);
1264   ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);CHKERRQ(ierr);
1265   ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);CHKERRQ(ierr);
1266   ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);CHKERRQ(ierr);
1267   ierr = PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);CHKERRQ(ierr);
1268 
1269   ierr = PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
1270 
1271   ierr = PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);CHKERRQ(ierr);
1272   if (ninfo) {
1273     if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must <= 40\n",ninfo);
1274     ierr = PetscMalloc1(ninfo,&mumps->info);CHKERRQ(ierr);
1275     mumps->ninfo = ninfo;
1276     for (i=0; i<ninfo; i++) {
1277       if (info[i] < 0 || info[i]>40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must between 1 and 40\n",ninfo);
1278       else {
1279         mumps->info[i] = info[i];
1280       }
1281     }
1282   }
1283 
1284   PetscOptionsEnd();
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #undef __FUNCT__
1289 #define __FUNCT__ "PetscInitializeMUMPS"
1290 PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
1291 {
1292   PetscErrorCode ierr;
1293 
1294   PetscFunctionBegin;
1295   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
1296   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);CHKERRQ(ierr);
1297   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));CHKERRQ(ierr);
1298 
1299   mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
1300 
1301   mumps->id.job = JOB_INIT;
1302   mumps->id.par = 1;  /* host participates factorizaton and solve */
1303   mumps->id.sym = mumps->sym;
1304   PetscMUMPS_c(&mumps->id);
1305 
1306   mumps->scat_rhs     = NULL;
1307   mumps->scat_sol     = NULL;
1308 
1309   /* set PETSc-MUMPS default options - override MUMPS default */
1310   mumps->id.ICNTL(3) = 0;
1311   mumps->id.ICNTL(4) = 0;
1312   if (mumps->size == 1) {
1313     mumps->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
1314   } else {
1315     mumps->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
1316     mumps->id.ICNTL(20) = 0;   /* rhs is in dense format */
1317     mumps->id.ICNTL(21) = 1;   /* distributed solution */
1318   }
1319 
1320   /* schur */
1321   mumps->id.size_schur      = 0;
1322   mumps->id.listvar_schur   = NULL;
1323   mumps->id.schur           = NULL;
1324   mumps->schur_second_solve = PETSC_FALSE;
1325   mumps->sizeredrhs         = 0;
1326   mumps->schur_pivots       = NULL;
1327   mumps->schur_work         = NULL;
1328   mumps->schur_sol          = NULL;
1329   mumps->schur_sizesol      = 0;
1330   mumps->schur_restored     = PETSC_TRUE;
1331   mumps->schur_factored     = PETSC_FALSE;
1332   mumps->schur_inverted     = PETSC_FALSE;
1333   PetscFunctionReturn(0);
1334 }
1335 
1336 /* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
/*
   Symbolic (analysis) phase of the MUMPS LU factorization for AIJ matrices.
   Converts A into coordinate triples (irn, jcn, val), hands the structure to
   MUMPS -- centralized on the host when ICNTL(18)==0, distributed when
   ICNTL(18)==3 -- and runs JOB_FACTSYMBOLIC. Finally installs the numeric,
   solve, solve-transpose and mat-solve function pointers on F.
*/
PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* ICNTL(6)>1: numerical values are needed already during analysis */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
      if (mumps->id.ICNTL(7) == 1) { /* use user-provide matrix ordering - assuming r = c ordering */
        /*
        PetscBool      flag;
        ierr = ISEqual(r,c,&flag);CHKERRQ(ierr);
        if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
        ierr = ISView(r,PETSC_VIEWER_STDOUT_SELF);
         */
        if (!mumps->myid) {
          const PetscInt *idx;
          PetscInt       i,*perm_in;

          ierr = PetscMalloc1(M,&perm_in);CHKERRQ(ierr);
          ierr = ISGetIndices(r,&idx);CHKERRQ(ierr);

          /* perm_in is released in MatDestroy_MUMPS() via PetscFree(mumps->id.perm_in) */
          mumps->id.perm_in = perm_in;
          for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
          ierr = ISRestoreIndices(r,&idx);CHKERRQ(ierr);
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve           = MatSolve_MUMPS;
  F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
  F->ops->matsolve        = MatMatSolve_MUMPS;
  PetscFunctionReturn(0);
}
1416 
1417 /* Note the Petsc r and c permutations are ignored */
1418 #undef __FUNCT__
1419 #define __FUNCT__ "MatLUFactorSymbolic_BAIJMUMPS"
1420 PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1421 {
1422   Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
1423   PetscErrorCode ierr;
1424   Vec            b;
1425   IS             is_iden;
1426   const PetscInt M = A->rmap->N;
1427 
1428   PetscFunctionBegin;
1429   mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1430 
1431   /* Set MUMPS options from the options database */
1432   ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);
1433 
1434   ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);
1435 
1436   /* analysis phase */
1437   /*----------------*/
1438   mumps->id.job = JOB_FACTSYMBOLIC;
1439   mumps->id.n   = M;
1440   switch (mumps->id.ICNTL(18)) {
1441   case 0:  /* centralized assembled matrix input */
1442     if (!mumps->myid) {
1443       mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1444       if (mumps->id.ICNTL(6)>1) {
1445         mumps->id.a = (MumpsScalar*)mumps->val;
1446       }
1447     }
1448     break;
1449   case 3:  /* distributed assembled matrix input (size>1) */
1450     mumps->id.nz_loc = mumps->nz;
1451     mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1452     if (mumps->id.ICNTL(6)>1) {
1453       mumps->id.a_loc = (MumpsScalar*)mumps->val;
1454     }
1455     /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1456     if (!mumps->myid) {
1457       ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
1458       ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
1459     } else {
1460       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
1461       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
1462     }
1463     ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
1464     ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
1465     ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
1466     ierr = VecDestroy(&b);CHKERRQ(ierr);
1467     break;
1468   }
1469   PetscMUMPS_c(&mumps->id);
1470   if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
1471 
1472   F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1473   F->ops->solve           = MatSolve_MUMPS;
1474   F->ops->solvetranspose  = MatSolveTranspose_MUMPS;
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 /* Note the Petsc r permutation and factor info are ignored */
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_MUMPS"
/*
   Symbolic Cholesky factorization: converts A to triplet form and runs the
   MUMPS analysis phase (job = JOB_FACTSYMBOLIC).  ICNTL(18) selects whether
   the matrix is supplied centralized on the host (0) or distributed (3).
   The PETSc ordering r and the factor info are ignored (see note above).
*/
PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_MUMPS      *mumps = (Mat_MUMPS*)F->spptr;
  PetscErrorCode ierr;
  Vec            b;
  IS             is_iden;
  const PetscInt M = A->rmap->N;

  PetscFunctionBegin;
  mumps->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Set MUMPS options from the options database */
  ierr = PetscSetMUMPSFromOptions(F,A);CHKERRQ(ierr);

  /* Convert A into the triplet (COO) layout MUMPS consumes */
  ierr = (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);CHKERRQ(ierr);

  /* analysis phase */
  /*----------------*/
  mumps->id.job = JOB_FACTSYMBOLIC;
  mumps->id.n   = M;
  switch (mumps->id.ICNTL(18)) {
  case 0:  /* centralized assembled matrix input: only the host passes the structure */
    if (!mumps->myid) {
      mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
      if (mumps->id.ICNTL(6)>1) {
        /* values passed to analysis — presumably needed for value-dependent
           preprocessing when ICNTL(6)>1; see MUMPS guide */
        mumps->id.a = (MumpsScalar*)mumps->val;
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    mumps->id.nz_loc = mumps->nz;
    mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
    if (mumps->id.ICNTL(6)>1) {
      mumps->id.a_loc = (MumpsScalar*)mumps->val;
    }
    /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
    if (!mumps->myid) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
    } else {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
    }
    ierr = MatCreateVecs(A,NULL,&b);CHKERRQ(ierr);
    ierr = VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);CHKERRQ(ierr);
    ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    break;
  }
  PetscMUMPS_c(&mumps->id);
  if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));

  F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
  F->ops->solve                 = MatSolve_MUMPS;
  F->ops->solvetranspose        = MatSolve_MUMPS; /* symmetric factorization: transpose solve reuses MatSolve_MUMPS */
  F->ops->matsolve              = MatMatSolve_MUMPS;
#if defined(PETSC_USE_COMPLEX)
  F->ops->getinertia = NULL;
#else
  F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
#endif
  PetscFunctionReturn(0);
}
1544 
1545 #undef __FUNCT__
1546 #define __FUNCT__ "MatView_MUMPS"
1547 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1548 {
1549   PetscErrorCode    ierr;
1550   PetscBool         iascii;
1551   PetscViewerFormat format;
1552   Mat_MUMPS         *mumps=(Mat_MUMPS*)A->spptr;
1553 
1554   PetscFunctionBegin;
1555   /* check if matrix is mumps type */
1556   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
1557 
1558   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1559   if (iascii) {
1560     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1561     if (format == PETSC_VIEWER_ASCII_INFO) {
1562       ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
1563       ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                   %d \n",mumps->id.sym);CHKERRQ(ierr);
1564       ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):            %d \n",mumps->id.par);CHKERRQ(ierr);
1565       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):         %d \n",mumps->id.ICNTL(1));CHKERRQ(ierr);
1566       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));CHKERRQ(ierr);
1567       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):   %d \n",mumps->id.ICNTL(3));CHKERRQ(ierr);
1568       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):        %d \n",mumps->id.ICNTL(4));CHKERRQ(ierr);
1569       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):         %d \n",mumps->id.ICNTL(5));CHKERRQ(ierr);
1570       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):        %d \n",mumps->id.ICNTL(6));CHKERRQ(ierr);
1571       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (sequentia matrix ordering):%d \n",mumps->id.ICNTL(7));CHKERRQ(ierr);
1572       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):        %d \n",mumps->id.ICNTL(8));CHKERRQ(ierr);
1573       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements):  %d \n",mumps->id.ICNTL(10));CHKERRQ(ierr);
1574       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):          %d \n",mumps->id.ICNTL(11));CHKERRQ(ierr);
1575       if (mumps->id.ICNTL(11)>0) {
1576         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(4) (inf norm of input mat):        %g\n",mumps->id.RINFOG(4));CHKERRQ(ierr);
1577         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(5) (inf norm of solution):         %g\n",mumps->id.RINFOG(5));CHKERRQ(ierr);
1578         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(6) (inf norm of residual):         %g\n",mumps->id.RINFOG(6));CHKERRQ(ierr);
1579         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));CHKERRQ(ierr);
1580         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(9) (error estimate):               %g \n",mumps->id.RINFOG(9));CHKERRQ(ierr);
1581         ierr = PetscViewerASCIIPrintf(viewer,"    RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));CHKERRQ(ierr);
1582       }
1583       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",mumps->id.ICNTL(12));CHKERRQ(ierr);
1584       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",mumps->id.ICNTL(13));CHKERRQ(ierr);
1585       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));CHKERRQ(ierr);
1586       /* ICNTL(15-17) not used */
1587       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",mumps->id.ICNTL(18));CHKERRQ(ierr);
1588       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",mumps->id.ICNTL(19));CHKERRQ(ierr);
1589       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",mumps->id.ICNTL(20));CHKERRQ(ierr);
1590       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",mumps->id.ICNTL(21));CHKERRQ(ierr);
1591       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(22) (in-core/out-of-core facility):               %d \n",mumps->id.ICNTL(22));CHKERRQ(ierr);
1592       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(23) (max size of memory can be allocated locally):%d \n",mumps->id.ICNTL(23));CHKERRQ(ierr);
1593 
1594       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(24) (detection of null pivot rows):               %d \n",mumps->id.ICNTL(24));CHKERRQ(ierr);
1595       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(25) (computation of a null space basis):          %d \n",mumps->id.ICNTL(25));CHKERRQ(ierr);
1596       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(26) (Schur options for rhs or solution):          %d \n",mumps->id.ICNTL(26));CHKERRQ(ierr);
1597       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(27) (experimental parameter):                     %d \n",mumps->id.ICNTL(27));CHKERRQ(ierr);
1598       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(28) (use parallel or sequential ordering):        %d \n",mumps->id.ICNTL(28));CHKERRQ(ierr);
1599       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(29) (parallel ordering):                          %d \n",mumps->id.ICNTL(29));CHKERRQ(ierr);
1600 
1601       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(30) (user-specified set of entries in inv(A)):    %d \n",mumps->id.ICNTL(30));CHKERRQ(ierr);
1602       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(31) (factors is discarded in the solve phase):    %d \n",mumps->id.ICNTL(31));CHKERRQ(ierr);
1603       ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(33) (compute determinant):                        %d \n",mumps->id.ICNTL(33));CHKERRQ(ierr);
1604 
1605       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",mumps->id.CNTL(1));CHKERRQ(ierr);
1606       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));CHKERRQ(ierr);
1607       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",mumps->id.CNTL(3));CHKERRQ(ierr);
1608       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",mumps->id.CNTL(4));CHKERRQ(ierr);
1609       ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(5) (fixation for null pivots):         %g \n",mumps->id.CNTL(5));CHKERRQ(ierr);
1610 
1611       /* infomation local to each processor */
1612       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);
1613       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1614       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %g \n",mumps->myid,mumps->id.RINFO(1));CHKERRQ(ierr);
1615       ierr = PetscViewerFlush(viewer);
1616       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);
1617       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(2));CHKERRQ(ierr);
1618       ierr = PetscViewerFlush(viewer);
1619       ierr = PetscViewerASCIIPrintf(viewer, "  RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);
1620       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d]  %g \n",mumps->myid,mumps->id.RINFO(3));CHKERRQ(ierr);
1621       ierr = PetscViewerFlush(viewer);
1622 
1623       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);
1624       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] %d \n",mumps->myid,mumps->id.INFO(15));CHKERRQ(ierr);
1625       ierr = PetscViewerFlush(viewer);
1626 
1627       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);
1628       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(16));CHKERRQ(ierr);
1629       ierr = PetscViewerFlush(viewer);
1630 
1631       ierr = PetscViewerASCIIPrintf(viewer, "  INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);
1632       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(23));CHKERRQ(ierr);
1633       ierr = PetscViewerFlush(viewer);
1634 
1635       if (mumps->ninfo && mumps->ninfo <= 40){
1636         PetscInt i;
1637         for (i=0; i<mumps->ninfo; i++){
1638           ierr = PetscViewerASCIIPrintf(viewer, "  INFO(%d): \n",mumps->info[i]);CHKERRQ(ierr);
1639           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"    [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));CHKERRQ(ierr);
1640           ierr = PetscViewerFlush(viewer);
1641         }
1642       }
1643 
1644 
1645       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1646 
1647       if (!mumps->myid) { /* information from the host */
1648         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));CHKERRQ(ierr);
1649         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));CHKERRQ(ierr);
1650         ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));CHKERRQ(ierr);
1651         ierr = PetscViewerASCIIPrintf(viewer,"  (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));CHKERRQ(ierr);
1652 
1653         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));CHKERRQ(ierr);
1654         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));CHKERRQ(ierr);
1655         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));CHKERRQ(ierr);
1656         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));CHKERRQ(ierr);
1657         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively use after analysis): %d \n",mumps->id.INFOG(7));CHKERRQ(ierr);
1658         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));CHKERRQ(ierr);
1659         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));CHKERRQ(ierr);
1660         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));CHKERRQ(ierr);
1661         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));CHKERRQ(ierr);
1662         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));CHKERRQ(ierr);
1663         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));CHKERRQ(ierr);
1664         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",mumps->id.INFOG(14));CHKERRQ(ierr);
1665         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));CHKERRQ(ierr);
1666         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));CHKERRQ(ierr);
1667         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));CHKERRQ(ierr);
1668         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));CHKERRQ(ierr);
1669         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));CHKERRQ(ierr);
1670         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));CHKERRQ(ierr);
1671         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));CHKERRQ(ierr);
1672         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));CHKERRQ(ierr);
1673         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));CHKERRQ(ierr);
1674         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));CHKERRQ(ierr);
1675         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));CHKERRQ(ierr);
1676         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));CHKERRQ(ierr);
1677         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));CHKERRQ(ierr);
1678         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));CHKERRQ(ierr);
1679         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));CHKERRQ(ierr);
1680         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));CHKERRQ(ierr);
1681         ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));CHKERRQ(ierr);
1682       }
1683     }
1684   }
1685   PetscFunctionReturn(0);
1686 }
1687 
1688 #undef __FUNCT__
1689 #define __FUNCT__ "MatGetInfo_MUMPS"
1690 PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1691 {
1692   Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1693 
1694   PetscFunctionBegin;
1695   info->block_size        = 1.0;
1696   info->nz_allocated      = mumps->id.INFOG(20);
1697   info->nz_used           = mumps->id.INFOG(20);
1698   info->nz_unneeded       = 0.0;
1699   info->assemblies        = 0.0;
1700   info->mallocs           = 0.0;
1701   info->memory            = 0.0;
1702   info->fill_ratio_given  = 0;
1703   info->fill_ratio_needed = 0;
1704   info->factor_mallocs    = 0;
1705   PetscFunctionReturn(0);
1706 }
1707 
1708 /* -------------------------------------------------------------------------------------------*/
1709 #undef __FUNCT__
1710 #define __FUNCT__ "MatMumpsSetSchurIndices_MUMPS"
1711 PetscErrorCode MatMumpsSetSchurIndices_MUMPS(Mat F,PetscInt size,PetscInt idxs[])
1712 {
1713   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1714   PetscErrorCode ierr;
1715 
1716   PetscFunctionBegin;
1717   if (mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS parallel Schur complements not yet supported from PETSc\n");
1718   if (mumps->id.size_schur != size) {
1719     ierr = PetscFree2(mumps->id.listvar_schur,mumps->id.schur);CHKERRQ(ierr);
1720     mumps->id.size_schur = size;
1721     mumps->id.schur_lld = size;
1722     ierr = PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);CHKERRQ(ierr);
1723   }
1724   ierr = PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));CHKERRQ(ierr);
1725   if (F->factortype == MAT_FACTOR_LU) {
1726     mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
1727   } else {
1728     mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
1729   }
1730   /* set a special value of ICNTL (not handled my MUMPS) to be used in the solve phase by PETSc */
1731   mumps->id.ICNTL(26) = -1;
1732   PetscFunctionReturn(0);
1733 }
1734 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetSchurIndices"
/*@
  MatMumpsSetSchurIndices - Set indices defining the Schur complement that MUMPS will compute during the factorization steps

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
.  size - number of Schur complement indices
-  idxs[] - array of Schur complement indices

   Notes:
   The indices are copied by this routine, so the caller retains ownership of idxs[] and must free it.
   MUMPS Schur complement mode is currently implemented for sequential matrices only.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsCreateSchurComplement(), MatMumpsGetSchurComplement()
@*/
PetscErrorCode MatMumpsSetSchurIndices(Mat F,PetscInt size,PetscInt idxs[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  if (size) PetscValidIntPointer(idxs,3);
  /* dispatch to the MUMPS implementation if F is a MUMPS factor; no-op otherwise */
  ierr = PetscTryMethod(F,"MatMumpsSetSchurIndices_C",(Mat,PetscInt,PetscInt[]),(F,size,idxs));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1767 
1768 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsCreateSchurComplement_MUMPS"
/*
   Build a new sequential dense matrix containing a COPY of the Schur
   complement data held in mumps->id.schur, converting from the layout
   selected by ICNTL(19) (full/lower-triangular, by rows/by columns) into
   PETSc's column-major MATDENSE storage.  Symmetric lower-triangular data
   is expanded to a full symmetric matrix.
*/
PetscErrorCode MatMumpsCreateSchurComplement_MUMPS(Mat F,Mat* S)
{
  Mat            St;
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  PetscScalar    *array;
#if defined(PETSC_USE_COMPLEX)
  /* imaginary unit, used to assemble PetscScalar values from MUMPS' {r,i} pairs */
  PetscScalar    im = PetscSqrtScalar((PetscScalar)-1.0);
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Schur mode must be enabled, indices set, and the raw data not lent out via MatMumpsGetSchurComplement */
  if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");

  ierr = MatCreate(PetscObjectComm((PetscObject)F),&St);CHKERRQ(ierr);
  ierr = MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);CHKERRQ(ierr);
  ierr = MatSetType(St,MATDENSE);CHKERRQ(ierr);
  ierr = MatSetUp(St);CHKERRQ(ierr);
  ierr = MatDenseGetArray(St,&array);CHKERRQ(ierr);
  if (!mumps->sym) { /* MUMPS always return a full matrix */
    if (mumps->id.ICNTL(19) == 1) { /* stored by rows: transpose into PETSc's column-major array */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        for (j=0;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          /* array is column-major: entry (row i, col j) lives at j*N+i */
          array[j*N+i] = val;
        }
      }
    } else { /* stored by columns: layouts match, copy wholesale */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    }
  } else { /* either full or lower-triangular (not packed) */
    if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns: mirror into both halves */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        for (j=i;j<N;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[i*N+j] = val;
          array[j*N+i] = val;
        }
      }
    } else if (mumps->id.ICNTL(19) == 3) { /* full matrix: copy wholesale */
      ierr = PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));CHKERRQ(ierr);
    } else { /* ICNTL(19) == 1 lower triangular stored by rows: mirror into both halves */
      PetscInt i,j,N=mumps->id.size_schur;
      for (i=0;i<N;i++) {
        for (j=0;j<i+1;j++) {
#if !defined(PETSC_USE_COMPLEX)
          PetscScalar val = mumps->id.schur[i*N+j];
#else
          PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
#endif
          array[i*N+j] = val;
          array[j*N+i] = val;
        }
      }
    }
  }
  ierr = MatDenseRestoreArray(St,&array);CHKERRQ(ierr);
  *S = St;
  PetscFunctionReturn(0);
}
1842 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsCreateSchurComplement"
/*@
  MatMumpsCreateSchurComplement - Create a Schur complement matrix object using Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Output Parameter:
.  S - location where to return the Schur complement (MATDENSE)

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine provides a copy of the Schur data stored within MUMPS data structures. The caller must destroy the object when it is no longer needed.
   If MatMumpsInvertSchurComplement has been called, the routine gets back the inverse.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsGetSchurComplement()
@*/
PetscErrorCode MatMumpsCreateSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* dispatch to the MUMPS implementation if F is a MUMPS factor; no-op otherwise */
  ierr = PetscTryMethod(F,"MatMumpsCreateSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1874 
1875 /* -------------------------------------------------------------------------------------------*/
1876 #undef __FUNCT__
1877 #define __FUNCT__ "MatMumpsGetSchurComplement_MUMPS"
1878 PetscErrorCode MatMumpsGetSchurComplement_MUMPS(Mat F,Mat* S)
1879 {
1880   Mat            St;
1881   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1882   PetscErrorCode ierr;
1883 
1884   PetscFunctionBegin;
1885   if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
1886   else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1887   else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
1888 
1889   /* It should be the responsibility of the user to handle different ICNTL(19) cases if they want to work with the raw data */
1890   /* should I also add errors when the Schur complement has been already factored? */
1891   ierr = MatCreateSeqDense(PetscObjectComm((PetscObject)F),mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&St);CHKERRQ(ierr);
1892   *S = St;
1893   mumps->schur_restored = PETSC_FALSE;
1894   PetscFunctionReturn(0);
1895 }
1896 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetSchurComplement"
/*@
  MatMumpsGetSchurComplement - Get a Schur complement matrix object using the current status of the raw Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Output Parameter:
.  S - location where to return the Schur complement (MATDENSE)

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine uses the pointer to the raw data of the Schur Complement stored within MUMPS data structures; no copy is made. The caller should call MatMumpsRestoreSchurComplement when the object is no longer needed.
   If MatMumpsInvertSchurComplement has been called, the routine gets back the inverse.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsRestoreSchurComplement(), MatMumpsCreateSchurComplement()
@*/
PetscErrorCode MatMumpsGetSchurComplement(Mat F,Mat* S)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* unlike the Set/Create wrappers this uses PetscUseMethod: it is an error if F is not a MUMPS factor */
  ierr = PetscUseMethod(F,"MatMumpsGetSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1928 
1929 /* -------------------------------------------------------------------------------------------*/
1930 #undef __FUNCT__
1931 #define __FUNCT__ "MatMumpsRestoreSchurComplement_MUMPS"
1932 PetscErrorCode MatMumpsRestoreSchurComplement_MUMPS(Mat F,Mat* S)
1933 {
1934   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1935   PetscErrorCode ierr;
1936 
1937   PetscFunctionBegin;
1938   if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
1939   else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1940   else if (mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has been already restored");
1941   ierr = MatDestroy(S);CHKERRQ(ierr);
1942   *S = NULL;
1943   mumps->schur_restored = PETSC_TRUE;
1944   PetscFunctionReturn(0);
1945 }
1946 
1947 #undef __FUNCT__
1948 #define __FUNCT__ "MatMumpsRestoreSchurComplement"
1949 /*@
1950   MatMumpsRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to MatGetSchurComplement
1951 
1952    Logically Collective on Mat
1953 
1954    Input Parameters:
1955 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1956 .  *S - location where the Schur complement is stored
1957 
1958    Notes:
1959    MUMPS Schur complement mode is currently implemented for sequential matrices.
1960 
1961    Level: advanced
1962 
1963    References: MUMPS Users' Guide
1964 
1965 .seealso: MatGetFactor(), MatMumpsSetSchurIndices(), MatMumpsGetSchurComplement(), MatMumpsCreateSchurComplement()
1966 @*/
1967 PetscErrorCode MatMumpsRestoreSchurComplement(Mat F,Mat* S)
1968 {
1969   PetscErrorCode ierr;
1970 
1971   PetscFunctionBegin;
1972   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
1973   ierr = PetscUseMethod(F,"MatMumpsRestoreSchurComplement_C",(Mat,Mat*),(F,S));CHKERRQ(ierr);
1974   PetscFunctionReturn(0);
1975 }
1976 
1977 /* -------------------------------------------------------------------------------------------*/
1978 #undef __FUNCT__
1979 #define __FUNCT__ "MatMumpsInvertSchurComplement_MUMPS"
1980 PetscErrorCode MatMumpsInvertSchurComplement_MUMPS(Mat F)
1981 {
1982   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
1983   PetscErrorCode ierr;
1984 
1985   PetscFunctionBegin;
1986   if (!mumps->id.ICNTL(19)) { /* do nothing */
1987     PetscFunctionReturn(0);
1988   }
1989   if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
1990   else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
1991   ierr = MatMumpsInvertSchur_Private(mumps);CHKERRQ(ierr);
1992   PetscFunctionReturn(0);
1993 }
1994 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsInvertSchurComplement"
/*@
  MatMumpsInvertSchurComplement - Invert the raw Schur data computed by MUMPS during the factorization step

   Logically Collective on Mat

   Input Parameter:
.  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface

   Notes:
   MUMPS Schur complement mode is currently implemented for sequential matrices.
   The routine uses the pointer to the raw data of the Schur Complement stored within MUMPS data structures.
   A subsequent MatMumpsGetSchurComplement() returns the inverse.

   Level: advanced

   References: MUMPS Users' Guide

.seealso: MatGetFactor(), MatMumpsSetSchurIndices()
@*/
PetscErrorCode MatMumpsInvertSchurComplement(Mat F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  /* PetscTryMethod: silently a no-op when F is not a MUMPS factor */
  ierr = PetscTryMethod(F,"MatMumpsInvertSchurComplement_C",(Mat),(F));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2024 
2025 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSolveSchurComplement_MUMPS"
/* Solve the Schur complement system with the caller's vectors by temporarily
   pointing the MUMPS reduced right-hand side (id.redrhs) at the rhs vector's
   array and the cached solution buffer at the sol vector's array, calling the
   private Schur solve driver, then restoring the original MUMPS state so the
   factorization object is left untouched. */
PetscErrorCode MatMumpsSolveSchurComplement_MUMPS(Mat F, Vec rhs, Vec sol)
{
  Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
  MumpsScalar    *orhs;                          /* saved id.redrhs pointer */
  PetscScalar    *osol,*nrhs,*nsol;              /* saved schur_sol; borrowed vector arrays */
  PetscInt       orhs_size,osol_size,olrhs_size; /* saved sizeredrhs, schur_sizesol, id.lredrhs */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
  if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
  else if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");

  /* swap pointers: save current MUMPS reduced-RHS state, then borrow the vectors' arrays */
  orhs = mumps->id.redrhs;
  olrhs_size = mumps->id.lredrhs;
  orhs_size = mumps->sizeredrhs;
  osol = mumps->schur_sol;
  osol_size = mumps->schur_sizesol;
  ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = (MumpsScalar*)nrhs;
  ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr);
  mumps->id.lredrhs = mumps->sizeredrhs; /* keep the leading dimension consistent with the new array */
  mumps->schur_sol = nsol;
  ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr);

  /* solve Schur complement (single right-hand side) */
  mumps->id.nrhs = 1;
  ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
  /* restore pointers */
  ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr);
  ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr);
  mumps->id.redrhs = orhs;
  mumps->id.lredrhs = olrhs_size;
  mumps->sizeredrhs = orhs_size;
  mumps->schur_sol = osol;
  mumps->schur_sizesol = osol_size;
  PetscFunctionReturn(0);
}
2068 
2069 #undef __FUNCT__
2070 #define __FUNCT__ "MatMumpsSolveSchurComplement"
2071 /*@
2072   MatMumpsSolveSchurComplement - Solve the Schur complement system computed by MUMPS during the factorization step
2073 
2074    Logically Collective on Mat
2075 
2076    Input Parameters:
2077 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2078 .  rhs - location where the right hand side of the Schur complement system is stored
2079 -  sol - location where the solution of the Schur complement system has to be returned
2080 
2081    Notes:
2082    MUMPS Schur complement mode is currently implemented for sequential matrices.
2083    The sizes of the vectors should match the size of the Schur complement
2084 
2085    Level: advanced
2086 
2087    References: MUMPS Users' Guide
2088 
2089 .seealso: MatGetFactor(), MatMumpsSetSchurIndices()
2090 @*/
2091 PetscErrorCode MatMumpsSolveSchurComplement(Mat F, Vec rhs, Vec sol)
2092 {
2093   PetscErrorCode ierr;
2094 
2095   PetscFunctionBegin;
2096   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
2097   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
2098   PetscValidHeaderSpecific(sol,VEC_CLASSID,2);
2099   PetscCheckSameComm(F,1,rhs,2);
2100   PetscCheckSameComm(F,1,sol,3);
2101   ierr = PetscUseMethod(F,"MatMumpsSolveSchurComplement_C",(Mat,Vec,Vec),(F,rhs,sol));CHKERRQ(ierr);
2102   PetscFunctionReturn(0);
2103 }
2104 
2105 /* -------------------------------------------------------------------------------------------*/
2106 #undef __FUNCT__
2107 #define __FUNCT__ "MatMumpsSolveSchurComplementTranspose_MUMPS"
2108 PetscErrorCode MatMumpsSolveSchurComplementTranspose_MUMPS(Mat F, Vec rhs, Vec sol)
2109 {
2110   Mat_MUMPS      *mumps =(Mat_MUMPS*)F->spptr;
2111   MumpsScalar    *orhs;
2112   PetscScalar    *osol,*nrhs,*nsol;
2113   PetscInt       orhs_size,osol_size;
2114   PetscErrorCode ierr;
2115 
2116   PetscFunctionBegin;
2117   if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatMumpsSetSchurIndices to enable it");
2118   else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatMumpsSetSchurIndices before");
2119   if (!mumps->schur_restored) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur matrix has not been restored using MatMumpsRestoreSchurComplement");
2120 
2121   /* swap pointers */
2122   orhs = mumps->id.redrhs;
2123   orhs_size = mumps->sizeredrhs;
2124   osol = mumps->schur_sol;
2125   osol_size = mumps->schur_sizesol;
2126   ierr = VecGetArray(rhs,&nrhs);CHKERRQ(ierr);
2127   ierr = VecGetArray(sol,&nsol);CHKERRQ(ierr);
2128   mumps->id.redrhs = (MumpsScalar*)nrhs;
2129   ierr = VecGetLocalSize(rhs,&mumps->sizeredrhs);CHKERRQ(ierr);
2130   mumps->schur_sol = nsol;
2131   ierr = VecGetLocalSize(sol,&mumps->schur_sizesol);CHKERRQ(ierr);
2132 
2133   /* solve Schur complement */
2134   mumps->id.nrhs = 1;
2135   mumps->id.ICNTL(9) = 0;
2136   ierr = MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);CHKERRQ(ierr);
2137   mumps->id.ICNTL(9) = 1;
2138   /* restore pointers */
2139   ierr = VecRestoreArray(rhs,&nrhs);CHKERRQ(ierr);
2140   ierr = VecRestoreArray(sol,&nsol);CHKERRQ(ierr);
2141   mumps->id.redrhs = orhs;
2142   mumps->sizeredrhs = orhs_size;
2143   mumps->schur_sol = osol;
2144   mumps->schur_sizesol = osol_size;
2145   PetscFunctionReturn(0);
2146 }
2147 
2148 #undef __FUNCT__
2149 #define __FUNCT__ "MatMumpsSolveSchurComplementTranspose"
2150 /*@
2151   MatMumpsSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed by MUMPS during the factorization step
2152 
2153    Logically Collective on Mat
2154 
2155    Input Parameters:
2156 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2157 .  rhs - location where the right hand side of the Schur complement system is stored
2158 -  sol - location where the solution of the Schur complement system has to be returned
2159 
2160    Notes:
2161    MUMPS Schur complement mode is currently implemented for sequential matrices.
2162    The sizes of the vectors should match the size of the Schur complement
2163 
2164    Level: advanced
2165 
2166    References: MUMPS Users' Guide
2167 
2168 .seealso: MatGetFactor(), MatMumpsSetSchurIndices()
2169 @*/
2170 PetscErrorCode MatMumpsSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
2171 {
2172   PetscErrorCode ierr;
2173 
2174   PetscFunctionBegin;
2175   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
2176   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
2177   PetscValidHeaderSpecific(sol,VEC_CLASSID,2);
2178   PetscCheckSameComm(F,1,rhs,2);
2179   PetscCheckSameComm(F,1,sol,3);
2180   ierr = PetscUseMethod(F,"MatMumpsSolveSchurComplementTranspose_C",(Mat,Vec,Vec),(F,rhs,sol));CHKERRQ(ierr);
2181   PetscFunctionReturn(0);
2182 }
2183 
2184 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetIcntl_MUMPS"
/* Implementation of MatMumpsSetIcntl(): writes ival into the MUMPS ICNTL array.
   NOTE(review): icntl is not range-checked here; an out-of-range index writes
   outside the ICNTL array -- confirm callers always pass a valid MUMPS index. */
PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  mumps->id.ICNTL(icntl) = ival; /* ICNTL() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2195 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetIcntl_MUMPS"
/* Implementation of MatMumpsGetIcntl(): reads the MUMPS ICNTL array.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *ival = mumps->id.ICNTL(icntl); /* ICNTL() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2206 
2207 #undef __FUNCT__
2208 #define __FUNCT__ "MatMumpsSetIcntl"
2209 /*@
2210   MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
2211 
2212    Logically Collective on Mat
2213 
2214    Input Parameters:
2215 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2216 .  icntl - index of MUMPS parameter array ICNTL()
2217 -  ival - value of MUMPS ICNTL(icntl)
2218 
2219   Options Database:
2220 .   -mat_mumps_icntl_<icntl> <ival>
2221 
2222    Level: beginner
2223 
2224    References: MUMPS Users' Guide
2225 
2226 .seealso: MatGetFactor()
2227 @*/
2228 PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
2229 {
2230   PetscErrorCode ierr;
2231 
2232   PetscFunctionBegin;
2233   PetscValidLogicalCollectiveInt(F,icntl,2);
2234   PetscValidLogicalCollectiveInt(F,ival,3);
2235   ierr = PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));CHKERRQ(ierr);
2236   PetscFunctionReturn(0);
2237 }
2238 
2239 #undef __FUNCT__
2240 #define __FUNCT__ "MatMumpsGetIcntl"
2241 /*@
2242   MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
2243 
2244    Logically Collective on Mat
2245 
2246    Input Parameters:
2247 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2248 -  icntl - index of MUMPS parameter array ICNTL()
2249 
2250   Output Parameter:
2251 .  ival - value of MUMPS ICNTL(icntl)
2252 
2253    Level: beginner
2254 
2255    References: MUMPS Users' Guide
2256 
2257 .seealso: MatGetFactor()
2258 @*/
2259 PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
2260 {
2261   PetscErrorCode ierr;
2262 
2263   PetscFunctionBegin;
2264   PetscValidLogicalCollectiveInt(F,icntl,2);
2265   PetscValidIntPointer(ival,3);
2266   ierr = PetscTryMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2267   PetscFunctionReturn(0);
2268 }
2269 
2270 /* -------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "MatMumpsSetCntl_MUMPS"
/* Implementation of MatMumpsSetCntl(): writes val into the MUMPS real control array CNTL.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  mumps->id.CNTL(icntl) = val; /* CNTL() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2281 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetCntl_MUMPS"
/* Implementation of MatMumpsGetCntl(): reads the MUMPS real control array CNTL.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *val = mumps->id.CNTL(icntl); /* CNTL() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2292 
2293 #undef __FUNCT__
2294 #define __FUNCT__ "MatMumpsSetCntl"
2295 /*@
2296   MatMumpsSetCntl - Set MUMPS parameter CNTL()
2297 
2298    Logically Collective on Mat
2299 
2300    Input Parameters:
2301 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2302 .  icntl - index of MUMPS parameter array CNTL()
2303 -  val - value of MUMPS CNTL(icntl)
2304 
2305   Options Database:
2306 .   -mat_mumps_cntl_<icntl> <val>
2307 
2308    Level: beginner
2309 
2310    References: MUMPS Users' Guide
2311 
2312 .seealso: MatGetFactor()
2313 @*/
2314 PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
2315 {
2316   PetscErrorCode ierr;
2317 
2318   PetscFunctionBegin;
2319   PetscValidLogicalCollectiveInt(F,icntl,2);
2320   PetscValidLogicalCollectiveReal(F,val,3);
2321   ierr = PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));CHKERRQ(ierr);
2322   PetscFunctionReturn(0);
2323 }
2324 
2325 #undef __FUNCT__
2326 #define __FUNCT__ "MatMumpsGetCntl"
2327 /*@
2328   MatMumpsGetCntl - Get MUMPS parameter CNTL()
2329 
2330    Logically Collective on Mat
2331 
2332    Input Parameters:
2333 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2334 -  icntl - index of MUMPS parameter array CNTL()
2335 
2336   Output Parameter:
2337 .  val - value of MUMPS CNTL(icntl)
2338 
2339    Level: beginner
2340 
2341    References: MUMPS Users' Guide
2342 
2343 .seealso: MatGetFactor()
2344 @*/
2345 PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
2346 {
2347   PetscErrorCode ierr;
2348 
2349   PetscFunctionBegin;
2350   PetscValidLogicalCollectiveInt(F,icntl,2);
2351   PetscValidRealPointer(val,3);
2352   ierr = PetscTryMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2353   PetscFunctionReturn(0);
2354 }
2355 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetInfo_MUMPS"
/* Implementation of MatMumpsGetInfo(): reads the local MUMPS INFO array.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *info = mumps->id.INFO(icntl); /* INFO() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2366 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetInfog_MUMPS"
/* Implementation of MatMumpsGetInfog(): reads the global MUMPS INFOG array.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *infog = mumps->id.INFOG(icntl); /* INFOG() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2377 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetRinfo_MUMPS"
/* Implementation of MatMumpsGetRinfo(): reads the local MUMPS real RINFO array.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *rinfo = mumps->id.RINFO(icntl); /* RINFO() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2388 
#undef __FUNCT__
#define __FUNCT__ "MatMumpsGetRinfog_MUMPS"
/* Implementation of MatMumpsGetRinfog(): reads the global MUMPS real RINFOG array.
   NOTE(review): icntl is not range-checked -- confirm callers pass a valid index. */
PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
{
  Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;

  PetscFunctionBegin;
  *rinfog = mumps->id.RINFOG(icntl); /* RINFOG() macro shifts to MUMPS 1-based indexing */
  PetscFunctionReturn(0);
}
2399 
2400 #undef __FUNCT__
2401 #define __FUNCT__ "MatMumpsGetInfo"
2402 /*@
2403   MatMumpsGetInfo - Get MUMPS parameter INFO()
2404 
2405    Logically Collective on Mat
2406 
2407    Input Parameters:
2408 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2409 -  icntl - index of MUMPS parameter array INFO()
2410 
2411   Output Parameter:
2412 .  ival - value of MUMPS INFO(icntl)
2413 
2414    Level: beginner
2415 
2416    References: MUMPS Users' Guide
2417 
2418 .seealso: MatGetFactor()
2419 @*/
2420 PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
2421 {
2422   PetscErrorCode ierr;
2423 
2424   PetscFunctionBegin;
2425   PetscValidIntPointer(ival,3);
2426   ierr = PetscTryMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2427   PetscFunctionReturn(0);
2428 }
2429 
2430 #undef __FUNCT__
2431 #define __FUNCT__ "MatMumpsGetInfog"
2432 /*@
2433   MatMumpsGetInfog - Get MUMPS parameter INFOG()
2434 
2435    Logically Collective on Mat
2436 
2437    Input Parameters:
2438 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2439 -  icntl - index of MUMPS parameter array INFOG()
2440 
2441   Output Parameter:
2442 .  ival - value of MUMPS INFOG(icntl)
2443 
2444    Level: beginner
2445 
2446    References: MUMPS Users' Guide
2447 
2448 .seealso: MatGetFactor()
2449 @*/
2450 PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
2451 {
2452   PetscErrorCode ierr;
2453 
2454   PetscFunctionBegin;
2455   PetscValidIntPointer(ival,3);
2456   ierr = PetscTryMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));CHKERRQ(ierr);
2457   PetscFunctionReturn(0);
2458 }
2459 
2460 #undef __FUNCT__
2461 #define __FUNCT__ "MatMumpsGetRinfo"
2462 /*@
2463   MatMumpsGetRinfo - Get MUMPS parameter RINFO()
2464 
2465    Logically Collective on Mat
2466 
2467    Input Parameters:
2468 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2469 -  icntl - index of MUMPS parameter array RINFO()
2470 
2471   Output Parameter:
2472 .  val - value of MUMPS RINFO(icntl)
2473 
2474    Level: beginner
2475 
2476    References: MUMPS Users' Guide
2477 
2478 .seealso: MatGetFactor()
2479 @*/
2480 PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
2481 {
2482   PetscErrorCode ierr;
2483 
2484   PetscFunctionBegin;
2485   PetscValidRealPointer(val,3);
2486   ierr = PetscTryMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2487   PetscFunctionReturn(0);
2488 }
2489 
2490 #undef __FUNCT__
2491 #define __FUNCT__ "MatMumpsGetRinfog"
2492 /*@
2493   MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
2494 
2495    Logically Collective on Mat
2496 
2497    Input Parameters:
2498 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2499 -  icntl - index of MUMPS parameter array RINFOG()
2500 
2501   Output Parameter:
2502 .  val - value of MUMPS RINFOG(icntl)
2503 
2504    Level: beginner
2505 
2506    References: MUMPS Users' Guide
2507 
2508 .seealso: MatGetFactor()
2509 @*/
2510 PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
2511 {
2512   PetscErrorCode ierr;
2513 
2514   PetscFunctionBegin;
2515   PetscValidRealPointer(val,3);
2516   ierr = PetscTryMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));CHKERRQ(ierr);
2517   PetscFunctionReturn(0);
2518 }
2519 
2520 /*MC
2521   MATSOLVERMUMPS -  A matrix type providing direct solvers (LU and Cholesky) for
2522   distributed and sequential matrices via the external package MUMPS.
2523 
2524   Works with MATAIJ and MATSBAIJ matrices
2525 
2526   Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch  to have PETSc installed with MUMPS
2527 
  Use -pc_type cholesky or lu -pc_factor_mat_solver_package mumps to use this direct solver
2529 
2530   Options Database Keys:
2531 +  -mat_mumps_icntl_1 <6>: ICNTL(1): output stream for error messages (None)
2532 .  -mat_mumps_icntl_2 <0>: ICNTL(2): output stream for diagnostic printing, statistics, and warning (None)
2533 .  -mat_mumps_icntl_3 <0>: ICNTL(3): output stream for global information, collected on the host (None)
2534 .  -mat_mumps_icntl_4 <0>: ICNTL(4): level of printing (0 to 4) (None)
2535 .  -mat_mumps_icntl_6 <7>: ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) (None)
2536 .  -mat_mumps_icntl_7 <7>: ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis (None)
2537 .  -mat_mumps_icntl_8 <77>: ICNTL(8): scaling strategy (-2 to 8 or 77) (None)
2538 .  -mat_mumps_icntl_10 <0>: ICNTL(10): max num of refinements (None)
2539 .  -mat_mumps_icntl_11 <0>: ICNTL(11): statistics related to an error analysis (via -ksp_view) (None)
2540 .  -mat_mumps_icntl_12 <1>: ICNTL(12): an ordering strategy for symmetric matrices (0 to 3) (None)
2541 .  -mat_mumps_icntl_13 <0>: ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting (None)
2542 .  -mat_mumps_icntl_14 <20>: ICNTL(14): percentage increase in the estimated working space (None)
2543 .  -mat_mumps_icntl_19 <0>: ICNTL(19): computes the Schur complement (None)
2544 .  -mat_mumps_icntl_22 <0>: ICNTL(22): in-core/out-of-core factorization and solve (0 or 1) (None)
.  -mat_mumps_icntl_23 <0>: ICNTL(23): max size of the working memory (MB) that MUMPS can allocate per processor (None)
2546 .  -mat_mumps_icntl_24 <0>: ICNTL(24): detection of null pivot rows (0 or 1) (None)
2547 .  -mat_mumps_icntl_25 <0>: ICNTL(25): compute a solution of a deficient matrix and a null space basis (None)
.  -mat_mumps_icntl_26 <0>: ICNTL(26): drives the solution phase if a Schur complement matrix is present (None)
.  -mat_mumps_icntl_28 <1>: ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering (None)
2550 .  -mat_mumps_icntl_29 <0>: ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis (None)
2551 .  -mat_mumps_icntl_30 <0>: ICNTL(30): compute user-specified set of entries in inv(A) (None)
2552 .  -mat_mumps_icntl_31 <0>: ICNTL(31): indicates which factors may be discarded during factorization (None)
2553 .  -mat_mumps_icntl_33 <0>: ICNTL(33): compute determinant (None)
2554 .  -mat_mumps_cntl_1 <0.01>: CNTL(1): relative pivoting threshold (None)
2555 .  -mat_mumps_cntl_2 <1.49012e-08>: CNTL(2): stopping criterion of refinement (None)
2556 .  -mat_mumps_cntl_3 <0>: CNTL(3): absolute pivoting threshold (None)
2557 .  -mat_mumps_cntl_4 <-1>: CNTL(4): value for static pivoting (None)
2558 -  -mat_mumps_cntl_5 <0>: CNTL(5): fixation for null pivots (None)
2559 
2560   Level: beginner
2561 
2562 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
2563 
2564 M*/
2565 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_mumps"
/* Report the solver-package name for factor matrices created by this interface;
   composed as "MatFactorGetSolverPackage_C" on every factor below. */
static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERMUMPS;
  PetscFunctionReturn(0);
}
2574 
/* MatGetFactor for Seq and MPI AIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_aij_mumps"
/* Create an empty factor matrix B of the same (AIJ) type/sizes as A, attach the
   MUMPS context and the MUMPS-specific operations/composed methods, and select
   the triple-conversion routine and MUMPS symmetry flag based on the requested
   factor type (LU vs Cholesky). */
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqAIJ;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  /* zero preallocation: B never stores entries itself, MUMPS holds the factors */
  if (isSeqAIJ) {
    ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
  }

  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);

  B->ops->view        = MatView_MUMPS;
  B->ops->getinfo     = MatGetInfo_MUMPS;
  B->ops->getdiagonal = MatGetDiagonal_MUMPS;

  /* expose the MUMPS-specific API (ICNTL/CNTL/INFO accessors, Schur complement support) */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);

  if (ftype == MAT_FACTOR_LU) {
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
    B->factortype            = MAT_FACTOR_LU;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
    mumps->sym = 0; /* MUMPS SYM=0: unsymmetric */
  } else {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
    else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
#if defined(PETSC_USE_COMPLEX)
    mumps->sym = 2; /* complex: always general symmetric (SYM=2) */
#else
    if (A->spd_set && A->spd) mumps->sym = 1; /* SYM=1: symmetric positive definite */
    else                      mumps->sym = 2; /* SYM=2: general symmetric */
#endif
  }

  /* chain destroy: remember the type's destroy so MatDestroy_MUMPS can call it after freeing MUMPS data */
  mumps->isAIJ    = PETSC_TRUE;
  mumps->Destroy  = B->ops->destroy;
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
2651 
/* MatGetFactor for Seq and MPI SBAIJ matrices */
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_sbaij_mumps"
/* Create an empty Cholesky factor matrix B of the same (SBAIJ) type/sizes as A
   and attach the MUMPS context and composed methods. Only Cholesky with block
   size 1 is supported for SBAIJ input. */
PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;
  Mat_MUMPS      *mumps;
  PetscBool      isSeqSBAIJ;

  PetscFunctionBegin;
  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
  if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
  /* zero preallocation: B never stores entries itself, MUMPS holds the factors */
  if (isSeqSBAIJ) {
    ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);

    mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
  } else {
    ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);

    mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
  }

  /* NOTE(review): unlike MatGetFactor_aij_mumps, B->ops->getinfo is not set here -- confirm intentional */
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
  B->ops->view                   = MatView_MUMPS;
  B->ops->getdiagonal            = MatGetDiagonal_MUMPS;

  /* expose the MUMPS-specific API (ICNTL/CNTL/INFO accessors, Schur complement support) */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);

  B->factortype = MAT_FACTOR_CHOLESKY;
#if defined(PETSC_USE_COMPLEX)
  mumps->sym = 2; /* complex: always general symmetric (SYM=2) */
#else
  if (A->spd_set && A->spd) mumps->sym = 1; /* SYM=1: symmetric positive definite */
  else                      mumps->sym = 2; /* SYM=2: general symmetric */
#endif

  /* chain destroy: remember the type's destroy so MatDestroy_MUMPS can call it after freeing MUMPS data */
  mumps->isAIJ    = PETSC_FALSE;
  mumps->Destroy  = B->ops->destroy;
  B->ops->destroy = MatDestroy_MUMPS;
  B->spptr        = (void*)mumps;

  ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);

  *F = B;
  PetscFunctionReturn(0);
}
2722 
2723 #undef __FUNCT__
2724 #define __FUNCT__ "MatGetFactor_baij_mumps"
2725 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2726 {
2727   Mat            B;
2728   PetscErrorCode ierr;
2729   Mat_MUMPS      *mumps;
2730   PetscBool      isSeqBAIJ;
2731 
2732   PetscFunctionBegin;
2733   /* Create the factorization matrix */
2734   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);CHKERRQ(ierr);
2735   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2736   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2737   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
2738   if (isSeqBAIJ) {
2739     ierr = MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);CHKERRQ(ierr);
2740   } else {
2741     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
2742   }
2743 
2744   ierr = PetscNewLog(B,&mumps);CHKERRQ(ierr);
2745   if (ftype == MAT_FACTOR_LU) {
2746     B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2747     B->factortype            = MAT_FACTOR_LU;
2748     if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2749     else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2750     mumps->sym = 0;
2751   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
2752 
2753   B->ops->view        = MatView_MUMPS;
2754   B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2755 
2756   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);CHKERRQ(ierr);
2757   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);CHKERRQ(ierr);
2758   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);CHKERRQ(ierr);
2759   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);CHKERRQ(ierr);
2760   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);CHKERRQ(ierr);
2761 
2762   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);CHKERRQ(ierr);
2763   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);CHKERRQ(ierr);
2764   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);CHKERRQ(ierr);
2765   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);CHKERRQ(ierr);
2766 
2767   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetSchurIndices_C",MatMumpsSetSchurIndices_MUMPS);CHKERRQ(ierr);
2768   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsInvertSchurComplement_C",MatMumpsInvertSchurComplement_MUMPS);CHKERRQ(ierr);
2769   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsCreateSchurComplement_C",MatMumpsCreateSchurComplement_MUMPS);CHKERRQ(ierr);
2770   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetSchurComplement_C",MatMumpsGetSchurComplement_MUMPS);CHKERRQ(ierr);
2771   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsRestoreSchurComplement_C",MatMumpsRestoreSchurComplement_MUMPS);CHKERRQ(ierr);
2772   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplement_C",MatMumpsSolveSchurComplement_MUMPS);CHKERRQ(ierr);
2773   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMumpsSolveSchurComplementTranspose_C",MatMumpsSolveSchurComplementTranspose_MUMPS);CHKERRQ(ierr);
2774 
2775   mumps->isAIJ    = PETSC_TRUE;
2776   mumps->Destroy  = B->ops->destroy;
2777   B->ops->destroy = MatDestroy_MUMPS;
2778   B->spptr        = (void*)mumps;
2779 
2780   ierr = PetscInitializeMUMPS(A,mumps);CHKERRQ(ierr);
2781 
2782   *F = B;
2783   PetscFunctionReturn(0);
2784 }
2785 
2786 PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*);
2787 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
2788 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat,MatFactorType,Mat*);
2789 
2790 #undef __FUNCT__
2791 #define __FUNCT__ "MatSolverPackageRegister_MUMPS"
2792 PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void)
2793 {
2794   PetscErrorCode ierr;
2795 
2796   PetscFunctionBegin;
2797   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2798   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2799   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2800   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2801   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2802   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2803   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);CHKERRQ(ierr);
2804   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2805   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);CHKERRQ(ierr);
2806   ierr = MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
2807   PetscFunctionReturn(0);
2808 }
2809 
2810