xref: /petsc/src/mat/impls/aij/mpi/mumps/mumps.c (revision 2ae77aedd54b15c5c1891663dbfc857d9f45ab81)
1 #define PETSCMAT_DLL
2 
3 /*
4     Provides an interface to the MUMPS sparse solver
5 */
6 #include "src/mat/impls/aij/seq/aij.h"
7 #include "src/mat/impls/aij/mpi/mpiaij.h"
8 #include "src/mat/impls/sbaij/seq/sbaij.h"
9 #include "src/mat/impls/sbaij/mpi/mpisbaij.h"
10 
11 EXTERN_C_BEGIN
12 #if defined(PETSC_USE_COMPLEX)
13 #include "zmumps_c.h"
14 #else
15 #include "dmumps_c.h"
16 #endif
17 EXTERN_C_END
18 #define JOB_INIT -1
19 #define JOB_END -2
20 /* macros s.t. indices match MUMPS documentation */
21 #define ICNTL(I) icntl[(I)-1]
22 #define CNTL(I) cntl[(I)-1]
23 #define INFOG(I) infog[(I)-1]
24 #define INFO(I) info[(I)-1]
25 #define RINFOG(I) rinfog[(I)-1]
26 #define RINFO(I) rinfo[(I)-1]
27 
/* Context attached (via spptr) to a [seq|mpi][s]aij matrix converted to a
   MUMPS type.  Holds the MUMPS instance, the matrix in triple (COO) form,
   and the saved method pointers of the base type so that they can be
   restored when converting back (see MatConvert_MUMPS_Base). */
typedef struct {
#if defined(PETSC_USE_COMPLEX)
  ZMUMPS_STRUC_C id;                   /* MUMPS control/data structure (complex scalars) */
#else
  DMUMPS_STRUC_C id;                   /* MUMPS control/data structure (real scalars) */
#endif
  MatStructure   matstruc;             /* DIFFERENT_NONZERO_PATTERN forces (re)analysis in MatFactorNumeric_MUMPS */
  PetscMPIInt    myid,size;            /* rank and size on the matrix communicator */
  PetscInt       *irn,*jcn,sym,nSolve; /* row/col triple indices (Fortran, 1-based); symmetry flag; # solves done */
  PetscScalar    *val;                 /* values of the matrix triples */
  MPI_Comm       comm_mumps;           /* duplicated communicator handed to MUMPS */
  VecScatter     scat_rhs, scat_sol;   /* scatters: distributed rhs -> host; distributed MUMPS solution -> petsc x */
  PetscTruth     isAIJ,CleanUpMUMPS;   /* base type is (MPI)AIJ?; MUMPS instance initialized and must be JOB_END'ed? */
  Vec            b_seq,x_seq;          /* host-centralized rhs; wrapper around local piece of distributed solution */
  /* methods of the base matrix type, saved at conversion and restored by MatConvert_MUMPS_Base */
  PetscErrorCode (*MatDuplicate)(Mat,MatDuplicateOption,Mat*);
  PetscErrorCode (*MatView)(Mat,PetscViewer);
  PetscErrorCode (*MatAssemblyEnd)(Mat,MatAssemblyType);
  PetscErrorCode (*MatLUFactorSymbolic)(Mat,IS,IS,MatFactorInfo*,Mat*);
  PetscErrorCode (*MatCholeskyFactorSymbolic)(Mat,IS,MatFactorInfo*,Mat*);
  PetscErrorCode (*MatDestroy)(Mat);
  PetscErrorCode (*specialdestroy)(Mat);  /* type-specific destroy (AIJ vs SBAIJ variant), run before base destroy */
  PetscErrorCode (*MatPreallocate)(Mat,int,int,int*,int,int*);
} Mat_MUMPS;
51 
52 EXTERN PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
53 EXTERN_C_BEGIN
54 PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_SBAIJ_SBAIJMUMPS(Mat,MatType,MatReuse,Mat*);
55 EXTERN_C_END
56 /* convert Petsc mpiaij matrix to triples: row[nz], col[nz], val[nz] */
57 /*
58   input:
59     A       - matrix in mpiaij or mpisbaij (bs=1) format
60     shift   - 0: C style output triple; 1: Fortran style output triple.
61     valOnly - FALSE: spaces are allocated and values are set for the triple
62               TRUE:  only the values in v array are updated
63   output:
64     nnz     - dim of r, c, and v (number of local nonzero entries of A)
65     r, c, v - row and col index, matrix values (matrix triples)
66  */
67 PetscErrorCode MatConvertToTriples(Mat A,int shift,PetscTruth valOnly,int *nnz,int **r, int **c, PetscScalar **v) {
68   PetscInt       *ai, *aj, *bi, *bj, rstart,nz, *garray;
69   PetscErrorCode ierr;
70   PetscInt       i,j,jj,jB,irow,m=A->rmap.n,*ajj,*bjj,countA,countB,colA_start,jcol;
71   PetscInt       *row,*col;
72   PetscScalar    *av, *bv,*val;
73   Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
74 
75   PetscFunctionBegin;
76   if (mumps->isAIJ){
77     Mat_MPIAIJ    *mat =  (Mat_MPIAIJ*)A->data;
78     Mat_SeqAIJ    *aa=(Mat_SeqAIJ*)(mat->A)->data;
79     Mat_SeqAIJ    *bb=(Mat_SeqAIJ*)(mat->B)->data;
80     nz = aa->nz + bb->nz;
81     ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap.rstart;
82     garray = mat->garray;
83     av=aa->a; bv=bb->a;
84 
85   } else {
86     Mat_MPISBAIJ  *mat =  (Mat_MPISBAIJ*)A->data;
87     Mat_SeqSBAIJ  *aa=(Mat_SeqSBAIJ*)(mat->A)->data;
88     Mat_SeqBAIJ    *bb=(Mat_SeqBAIJ*)(mat->B)->data;
89     if (A->rmap.bs > 1) SETERRQ1(PETSC_ERR_SUP," bs=%d is not supported yet\n", A->rmap.bs);
90     nz = aa->nz + bb->nz;
91     ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap.rstart;
92     garray = mat->garray;
93     av=aa->a; bv=bb->a;
94   }
95 
96   if (!valOnly){
97     ierr = PetscMalloc(nz*sizeof(PetscInt) ,&row);CHKERRQ(ierr);
98     ierr = PetscMalloc(nz*sizeof(PetscInt),&col);CHKERRQ(ierr);
99     ierr = PetscMalloc(nz*sizeof(PetscScalar),&val);CHKERRQ(ierr);
100     *r = row; *c = col; *v = val;
101   } else {
102     row = *r; col = *c; val = *v;
103   }
104   *nnz = nz;
105 
106   jj = 0; irow = rstart;
107   for ( i=0; i<m; i++ ) {
108     ajj = aj + ai[i];                 /* ptr to the beginning of this row */
109     countA = ai[i+1] - ai[i];
110     countB = bi[i+1] - bi[i];
111     bjj = bj + bi[i];
112 
113     /* get jB, the starting local col index for the 2nd B-part */
114     colA_start = rstart + ajj[0]; /* the smallest col index for A */
115     j=-1;
116     do {
117       j++;
118       if (j == countB) break;
119       jcol = garray[bjj[j]];
120     } while (jcol < colA_start);
121     jB = j;
122 
123     /* B-part, smaller col index */
124     colA_start = rstart + ajj[0]; /* the smallest col index for A */
125     for (j=0; j<jB; j++){
126       jcol = garray[bjj[j]];
127       if (!valOnly){
128         row[jj] = irow + shift; col[jj] = jcol + shift;
129 
130       }
131       val[jj++] = *bv++;
132     }
133     /* A-part */
134     for (j=0; j<countA; j++){
135       if (!valOnly){
136         row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
137       }
138       val[jj++] = *av++;
139     }
140     /* B-part, larger col index */
141     for (j=jB; j<countB; j++){
142       if (!valOnly){
143         row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
144       }
145       val[jj++] = *bv++;
146     }
147     irow++;
148   }
149 
150   PetscFunctionReturn(0);
151 }
152 
153 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvert_MUMPS_Base"
/*
  Converts a MUMPS matrix back to its base type (given by 'type'):
  restores the method pointers that were saved in the Mat_MUMPS context,
  re-composes the original preallocation function, removes the composed
  MatConvert_* functions, frees the context, and renames the type.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MUMPS_Base(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat            B=*newmat;
  Mat_MUMPS      *mumps=(Mat_MUMPS*)A->spptr;
  void           (*f)(void);

  PetscFunctionBegin;
  if (reuse == MAT_INITIAL_MATRIX) {
    /* convert a copy; A itself keeps its MUMPS type */
    ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
  }
  /* restore the base-type methods saved at conversion time */
  B->ops->duplicate              = mumps->MatDuplicate;
  B->ops->view                   = mumps->MatView;
  B->ops->assemblyend            = mumps->MatAssemblyEnd;
  B->ops->lufactorsymbolic       = mumps->MatLUFactorSymbolic;
  B->ops->choleskyfactorsymbolic = mumps->MatCholeskyFactorSymbolic;
  B->ops->destroy                = mumps->MatDestroy;

  /* put back original composed preallocation function */
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",(PetscVoidStarFunction)&f);CHKERRQ(ierr);
  if (f) {
    ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C","",(PetscVoidFunction)mumps->MatPreallocate);CHKERRQ(ierr);
  }
  ierr     = PetscFree(mumps);CHKERRQ(ierr);
  A->spptr = PETSC_NULL;

  /* drop every composed conversion function (both AIJ and SBAIJ flavors);
     composing PETSC_NULL removes the entry */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_seqaij_aijmumps_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_aijmumps_seqaij_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_aijmumps_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_aijmumps_mpiaij_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_seqsbaij_sbaijmumps_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_sbaijmumps_seqsbaij_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpisbaij_sbaijmumps_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_sbaijmumps_mpisbaij_C","",PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)B,type);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}
195 EXTERN_C_END
196 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MUMPS"
/*
  Destroys a MUMPS matrix: releases all MUMPS-side resources (if the
  instance was ever initialized), converts the matrix back to its base
  type, then invokes the restored base-type destroy.
*/
PetscErrorCode MatDestroy_MUMPS(Mat A)
{
  Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
  PetscErrorCode ierr;
  PetscMPIInt    size=lu->size;
  PetscErrorCode (*specialdestroy)(Mat);
  PetscFunctionBegin;
  if (lu->CleanUpMUMPS) {
    /* Terminate instance, deallocate memories */
    if (size > 1){
      /* objects only created in the parallel (distributed input) path */
      ierr = PetscFree(lu->id.sol_loc);CHKERRQ(ierr);
      ierr = VecScatterDestroy(lu->scat_rhs);CHKERRQ(ierr);
      ierr = VecDestroy(lu->b_seq);CHKERRQ(ierr);
      /* scat_sol/x_seq are created lazily on the first MatSolve_MUMPS */
      if (lu->nSolve && lu->scat_sol){ierr = VecScatterDestroy(lu->scat_sol);CHKERRQ(ierr);}
      if (lu->nSolve && lu->x_seq){ierr = VecDestroy(lu->x_seq);CHKERRQ(ierr);}
      ierr = PetscFree(lu->val);CHKERRQ(ierr);
    }
    lu->id.job=JOB_END;  /* tell MUMPS to release its internal storage */
#if defined(PETSC_USE_COMPLEX)
    zmumps_c(&lu->id);
#else
    dmumps_c(&lu->id);
#endif
    ierr = PetscFree(lu->irn);CHKERRQ(ierr);
    ierr = PetscFree(lu->jcn);CHKERRQ(ierr);
    ierr = MPI_Comm_free(&(lu->comm_mumps));CHKERRQ(ierr);
  }
  /* save the pointer first: specialdestroy converts A back to its base type,
     which frees the Mat_MUMPS context 'lu' and restores A->ops->destroy */
  specialdestroy = lu->specialdestroy;
  ierr = (*specialdestroy)(A);CHKERRQ(ierr);
  ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
231 
232 #undef __FUNCT__
233 #define __FUNCT__ "MatDestroy_AIJMUMPS"
234 PetscErrorCode MatDestroy_AIJMUMPS(Mat A)
235 {
236   PetscErrorCode ierr;
237   PetscMPIInt    size;
238 
239   PetscFunctionBegin;
240   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
241   if (size==1) {
242     ierr = MatConvert_MUMPS_Base(A,MATSEQAIJ,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
243   } else {
244     ierr = MatConvert_MUMPS_Base(A,MATMPIAIJ,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
245   }
246   PetscFunctionReturn(0);
247 }
248 
249 #undef __FUNCT__
250 #define __FUNCT__ "MatDestroy_SBAIJMUMPS"
251 PetscErrorCode MatDestroy_SBAIJMUMPS(Mat A)
252 {
253   PetscErrorCode ierr;
254   PetscMPIInt    size;
255 
256   PetscFunctionBegin;
257   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
258   if (size==1) {
259     ierr = MatConvert_MUMPS_Base(A,MATSEQSBAIJ,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
260   } else {
261     ierr = MatConvert_MUMPS_Base(A,MATMPISBAIJ,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
262   }
263   PetscFunctionReturn(0);
264 }
265 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_MUMPS"
/*
  Solves A x = b using the MUMPS factorization stored in A->spptr (job=3).
  MUMPS requires the rhs centralized on the host process; on size>1 the rhs
  is scattered to the host and the (distributed) solution is scattered back
  into the petsc vector x.  On size==1 MUMPS returns the solution in the
  rhs array, so b is copied into x and x's array is handed to MUMPS.
*/
PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x) {
  Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
  PetscScalar    *array;
  Vec            x_seq;
  IS             is_iden,is_petsc;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  lu->id.nrhs = 1;
  x_seq = lu->b_seq;  /* b_seq serves as the centralized rhs staging vector */
  if (lu->size > 1){
    /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
    ierr = VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (!lu->myid) {ierr = VecGetArray(x_seq,&array);CHKERRQ(ierr);}
  } else {  /* size == 1 */
    /* solve in place: MUMPS returns the solution in the rhs array */
    ierr = VecCopy(b,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
  }
  if (!lu->myid) { /* define rhs on the host */
#if defined(PETSC_USE_COMPLEX)
    lu->id.rhs = (mumps_double_complex*)array;
#else
    lu->id.rhs = array;
#endif
  }
  if (lu->size == 1){
    ierr = VecRestoreArray(x,&array);CHKERRQ(ierr);
  } else if (!lu->myid){
    ierr = VecRestoreArray(x_seq,&array);CHKERRQ(ierr);
  }

  if (lu->size > 1){
    /* distributed solution */
    lu->id.ICNTL(21) = 1;
    if (!lu->nSolve){
      /* Create x_seq=sol_loc for repeated use */
      PetscInt    lsol_loc;
      PetscScalar *sol_loc;
      lsol_loc = lu->id.INFO(23); /* length of sol_loc */
      /* one allocation carries both the value array and the index array isol_loc */
      ierr = PetscMalloc((1+lsol_loc)*(sizeof(PetscScalar)+sizeof(PetscInt)),&sol_loc);CHKERRQ(ierr);
      lu->id.isol_loc = (PetscInt *)(sol_loc + lsol_loc);
      lu->id.lsol_loc = lsol_loc;
#if defined(PETSC_USE_COMPLEX)
      lu->id.sol_loc  = (ZMUMPS_DOUBLE *)sol_loc;
#else
      lu->id.sol_loc  = (DMUMPS_DOUBLE *)sol_loc;
#endif
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,lsol_loc,sol_loc,&lu->x_seq);CHKERRQ(ierr);
    }
  }

  /* solve phase */
  /*-------------*/
  lu->id.job = 3;
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) {
    SETERRQ1(PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",lu->id.INFOG(1));
  }

  if (lu->size > 1) { /* convert mumps distributed solution to petsc mpi x */
    if (!lu->nSolve){ /* create scatter scat_sol */
      ierr = ISCreateStride(PETSC_COMM_SELF,lu->id.lsol_loc,0,1,&is_iden);CHKERRQ(ierr); /* from */
      for (i=0; i<lu->id.lsol_loc; i++){
        lu->id.isol_loc[i] -= 1; /* change Fortran style to C style */
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,lu->id.lsol_loc,lu->id.isol_loc,&is_petsc);CHKERRQ(ierr);  /* to */
      ierr = VecScatterCreate(lu->x_seq,is_iden,x,is_petsc,&lu->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(is_iden);CHKERRQ(ierr);
      ierr = ISDestroy(is_petsc);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  lu->nSolve++;  /* first-solve-only setup above is keyed off this counter */
  PetscFunctionReturn(0);
}
350 
351 #if !defined(PETSC_USE_COMPLEX)
352 /*
353   input:
354    F:        numeric factor
355   output:
356    nneg:     total number of negative pivots
357    nzero:    0
358    npos:     (global dimension of F) - nneg
359 */
360 
361 #undef __FUNCT__
362 #define __FUNCT__ "MatGetInertia_SBAIJMUMPS"
363 PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
364 {
365   Mat_MUMPS      *lu =(Mat_MUMPS*)F->spptr;
366   PetscErrorCode ierr;
367   PetscMPIInt    size;
368 
369   PetscFunctionBegin;
370   ierr = MPI_Comm_size(((PetscObject)F)->comm,&size);CHKERRQ(ierr);
371   /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
372   if (size > 1 && lu->id.ICNTL(13) != 1){
373     SETERRQ1(PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set as 1 for correct global matrix inertia\n",lu->id.INFOG(13));
374   }
375   if (nneg){
376     if (!lu->myid){
377       *nneg = lu->id.INFOG(12);
378     }
379     ierr = MPI_Bcast(nneg,1,MPI_INT,0,lu->comm_mumps);CHKERRQ(ierr);
380   }
381   if (nzero) *nzero = 0;
382   if (npos)  *npos  = F->rmap.N - (*nneg);
383   PetscFunctionReturn(0);
384 }
385 #endif /* !defined(PETSC_USE_COMPLEX) */
386 
#undef __FUNCT__
#define __FUNCT__ "MatFactorNumeric_MUMPS"
/*
  Runs the MUMPS analysis (job=1, first factorization of a pattern only)
  and numeric factorization (job=2) phases for matrix A; the result lives
  in *F.  On the first call this also initializes the MUMPS instance,
  reads the -mat_mumps_* runtime options, converts A to triple form, and
  (in parallel) creates the rhs scatter reused by MatSolve_MUMPS.
*/
PetscErrorCode MatFactorNumeric_MUMPS(Mat A,MatFactorInfo *info,Mat *F)
{
  Mat_MUMPS      *lu =(Mat_MUMPS*)(*F)->spptr;   /* factor-side context */
  Mat_MUMPS      *lua=(Mat_MUMPS*)(A)->spptr;    /* matrix-side context */
  PetscErrorCode ierr;
  PetscInt       rnz,nnz,nz=0,i,M=A->rmap.N,*ai,*aj,icntl;
  PetscTruth     valOnly,flg;
  Mat            F_diag;
  IS             is_iden;
  Vec            b;

  PetscFunctionBegin;
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){ /* first factorization of this pattern */
    (*F)->ops->solve    = MatSolve_MUMPS;

    /* Initialize a MUMPS instance */
    ierr = MPI_Comm_rank(((PetscObject)A)->comm, &lu->myid);  /* NOTE(review): missing CHKERRQ(ierr) on this call */
    ierr = MPI_Comm_size(((PetscObject)A)->comm,&lu->size);CHKERRQ(ierr);
    lua->myid = lu->myid; lua->size = lu->size;
    lu->id.job = JOB_INIT;
    /* give MUMPS its own communicator so its traffic cannot interfere with PETSc's */
    ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_mumps));CHKERRQ(ierr);
    lu->id.comm_fortran = MPI_Comm_c2f(lu->comm_mumps);

    /* Set mumps options */
    ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"MUMPS Options","Mat");CHKERRQ(ierr);
    lu->id.par=1;  /* host participates factorizaton and solve */
    lu->id.sym=lu->sym;
    if (lu->sym == 2){
      /* allow the user to declare the matrix spd (sym=1) instead of general symmetric */
      ierr = PetscOptionsInt("-mat_mumps_sym","SYM: (1,2)","None",lu->id.sym,&icntl,&flg);CHKERRQ(ierr);
      if (flg && icntl == 1) lu->id.sym=icntl;  /* matrix is spd */
    }
#if defined(PETSC_USE_COMPLEX)
    zmumps_c(&lu->id);
#else
    dmumps_c(&lu->id);
#endif

    if (lu->size == 1){
      lu->id.ICNTL(18) = 0;   /* centralized assembled matrix input */
    } else {
      lu->id.ICNTL(18) = 3;   /* distributed assembled matrix input */
    }

    icntl=-1;
    lu->id.ICNTL(4) = 0;  /* level of printing; overwrite mumps default ICNTL(4)=2 */
    ierr = PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",lu->id.ICNTL(4),&icntl,&flg);CHKERRQ(ierr);
    if ((flg && icntl > 0) || PetscLogPrintInfo) {
      lu->id.ICNTL(4)=icntl; /* and use mumps default icntl(i), i=1,2,3 */
    } else { /* no output */
      lu->id.ICNTL(1) = 0;  /* error message, default= 6 */
      lu->id.ICNTL(2) = -1; /* output stream for diagnostic printing, statistics, and warning. default=0 */
      lu->id.ICNTL(3) = -1; /* output stream for global information, default=6 */
    }
    ierr = PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): matrix prescaling (0 to 7)","None",lu->id.ICNTL(6),&lu->id.ICNTL(6),PETSC_NULL);CHKERRQ(ierr);
    icntl=-1;
    ierr = PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7)","None",lu->id.ICNTL(7),&icntl,&flg);CHKERRQ(ierr);
    if (flg) {
      if (icntl== 1){
        /* ICNTL(7)=1 requires a user-supplied PERM_IN array, which this interface does not pass */
        SETERRQ(PETSC_ERR_SUP,"pivot order be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
      } else {
        lu->id.ICNTL(7) = icntl;
      }
    }
    ierr = PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): A or A^T x=b to be solved. 1: A; otherwise: A^T","None",lu->id.ICNTL(9),&lu->id.ICNTL(9),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",lu->id.ICNTL(10),&lu->id.ICNTL(10),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): error analysis, a positive value returns statistics (by -ksp_view)","None",lu->id.ICNTL(11),&lu->id.ICNTL(11),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control","None",lu->id.ICNTL(12),&lu->id.ICNTL(12),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control","None",lu->id.ICNTL(13),&lu->id.ICNTL(13),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",lu->id.ICNTL(14),&lu->id.ICNTL(14),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_mumps_icntl_15","ICNTL(15): efficiency control","None",lu->id.ICNTL(15),&lu->id.ICNTL(15),PETSC_NULL);CHKERRQ(ierr);

    ierr = PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",lu->id.CNTL(1),&lu->id.CNTL(1),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",lu->id.CNTL(2),&lu->id.CNTL(2),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",lu->id.CNTL(3),&lu->id.CNTL(3),PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",lu->id.CNTL(4),&lu->id.CNTL(4),PETSC_NULL);CHKERRQ(ierr);
    PetscOptionsEnd();
  }

  /* define matrix A */
  switch (lu->id.ICNTL(18)){
  case 0:  /* centralized assembled matrix input (size=1) */
    if (!lu->myid) {
      if (lua->isAIJ){
        Mat_SeqAIJ   *aa = (Mat_SeqAIJ*)A->data;
        nz               = aa->nz;
        ai = aa->i; aj = aa->j; lu->val = aa->a;
      } else {
        Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)A->data;
        nz                  =  aa->nz;
        ai = aa->i; aj = aa->j; lu->val = aa->a;
      }
      if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){ /* first numeric factorization, get irn and jcn */
        ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->irn);CHKERRQ(ierr);
        ierr = PetscMalloc(nz*sizeof(PetscInt),&lu->jcn);CHKERRQ(ierr);
        nz = 0;
        /* expand the CSR row pointers of A into explicit 1-based row/col triples */
        for (i=0; i<M; i++){
          rnz = ai[i+1] - ai[i];
          while (rnz--) {  /* Fortran row/col index! */
            lu->irn[nz] = i+1; lu->jcn[nz] = (*aj)+1; aj++; nz++;
          }
        }
      }
    }
    break;
  case 3:  /* distributed assembled matrix input (size>1) */
    if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
      valOnly = PETSC_FALSE;
    } else {
      valOnly = PETSC_TRUE; /* only update mat values, not row and col index */
    }
    ierr = MatConvertToTriples(A,1,valOnly, &nnz, &lu->irn, &lu->jcn, &lu->val);CHKERRQ(ierr);
    break;
  default: SETERRQ(PETSC_ERR_SUP,"Matrix input format is not supported by MUMPS.");
  }

  /* analysis phase */
  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
    lu->id.job = 1;

    lu->id.n = M;
    switch (lu->id.ICNTL(18)){
    case 0:  /* centralized assembled matrix input */
      if (!lu->myid) {
        lu->id.nz =nz; lu->id.irn=lu->irn; lu->id.jcn=lu->jcn;
        if (lu->id.ICNTL(6)>1){
          /* prescaling during analysis needs the numeric values too */
#if defined(PETSC_USE_COMPLEX)
          lu->id.a = (mumps_double_complex*)lu->val;
#else
          lu->id.a = lu->val;
#endif
        }
      }
      break;
    case 3:  /* distributed assembled matrix input (size>1) */
      lu->id.nz_loc = nnz;
      lu->id.irn_loc=lu->irn; lu->id.jcn_loc=lu->jcn;
      if (lu->id.ICNTL(6)>1) {
        /* prescaling during analysis needs the numeric values too */
#if defined(PETSC_USE_COMPLEX)
        lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
        lu->id.a_loc = lu->val;
#endif
      }
      /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      if (!lu->myid){
        /* host receives the whole rhs */
        ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap.N,&lu->b_seq);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap.N,0,1,&is_iden);CHKERRQ(ierr);
      } else {
        /* other ranks contribute only; they receive nothing */
        ierr = VecCreateSeq(PETSC_COMM_SELF,0,&lu->b_seq);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);CHKERRQ(ierr);
      }
      /* template vector with A's row layout, used only to build the scatter */
      ierr = VecCreate(((PetscObject)A)->comm,&b);CHKERRQ(ierr);
      ierr = VecSetSizes(b,A->rmap.n,PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetFromOptions(b);CHKERRQ(ierr);

      ierr = VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);CHKERRQ(ierr);
      ierr = ISDestroy(is_iden);CHKERRQ(ierr);
      ierr = VecDestroy(b);CHKERRQ(ierr);
      break;
    }
#if defined(PETSC_USE_COMPLEX)
    zmumps_c(&lu->id);
#else
    dmumps_c(&lu->id);
#endif
    if (lu->id.INFOG(1) < 0) {
      SETERRQ1(PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",lu->id.INFOG(1));
    }
  }

  /* numerical factorization phase */
  /*-------------------------------*/
  lu->id.job = 2;
  if(!lu->id.ICNTL(18)) {
    /* centralized input: values live on the host only */
    if (!lu->myid) {
#if defined(PETSC_USE_COMPLEX)
      lu->id.a = (mumps_double_complex*)lu->val;
#else
      lu->id.a = lu->val;
#endif
    }
  } else {
    /* distributed input: every rank provides its local values */
#if defined(PETSC_USE_COMPLEX)
    lu->id.a_loc = (mumps_double_complex*)lu->val;
#else
    lu->id.a_loc = lu->val;
#endif
  }
#if defined(PETSC_USE_COMPLEX)
  zmumps_c(&lu->id);
#else
  dmumps_c(&lu->id);
#endif
  if (lu->id.INFOG(1) < 0) {
    if (lu->id.INFO(1) == -13) {
      SETERRQ1(PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",lu->id.INFO(2));
    } else {
      SETERRQ2(PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",lu->id.INFO(1),lu->id.INFO(2));
    }
  }

  /* NOTE(review): the condition tests ICNTL(16) but the message prints INFOG(16) — one of the two looks wrong; confirm against the MUMPS user guide */
  if (!lu->myid && lu->id.ICNTL(16) > 0){
    SETERRQ1(PETSC_ERR_LIB,"  lu->id.ICNTL(16):=%d\n",lu->id.INFOG(16));
  }

  if (lu->size > 1){
    /* mark the diagonal block of the parallel factor assembled so PETSc accepts it */
    if ((*F)->factor == FACTOR_LU){
      F_diag = ((Mat_MPIAIJ *)(*F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ *)(*F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
    if (lu->nSolve){
      /* refactorization: drop solve-side objects so MatSolve_MUMPS rebuilds them */
      ierr = VecScatterDestroy(lu->scat_sol);CHKERRQ(ierr);
      ierr = PetscFree(lu->id.sol_loc);CHKERRQ(ierr);
      ierr = VecDestroy(lu->x_seq);CHKERRQ(ierr);
    }
  }
  (*F)->assembled   = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;  /* subsequent calls skip analysis */
  lu->CleanUpMUMPS  = PETSC_TRUE;            /* MatDestroy_MUMPS must JOB_END the instance */
  lu->nSolve        = 0;
  PetscFunctionReturn(0);
}
614 
615 /* Note the Petsc r and c permutations are ignored */
616 #undef __FUNCT__
617 #define __FUNCT__ "MatLUFactorSymbolic_AIJMUMPS"
618 PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat A,IS r,IS c,MatFactorInfo *info,Mat *F) {
619   Mat            B;
620   Mat_MUMPS      *lu;
621   PetscErrorCode ierr;
622 
623   PetscFunctionBegin;
624   /* Create the factorization matrix */
625   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
626   ierr = MatSetSizes(B,A->rmap.n,A->cmap.n,A->rmap.N,A->cmap.N);CHKERRQ(ierr);
627   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
628   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
629   ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
630 
631   B->ops->lufactornumeric = MatFactorNumeric_MUMPS;
632   B->factor               = FACTOR_LU;
633   lu                      = (Mat_MUMPS*)B->spptr;
634   lu->sym                 = 0;
635   lu->matstruc            = DIFFERENT_NONZERO_PATTERN;
636 
637   *F = B;
638   PetscFunctionReturn(0);
639 }
640 
641 /* Note the Petsc r permutation is ignored */
642 #undef __FUNCT__
643 #define __FUNCT__ "MatCholeskyFactorSymbolic_SBAIJMUMPS"
644 PetscErrorCode MatCholeskyFactorSymbolic_SBAIJMUMPS(Mat A,IS r,MatFactorInfo *info,Mat *F) {
645   Mat            B;
646   Mat_MUMPS      *lu;
647   PetscErrorCode ierr;
648 
649   PetscFunctionBegin;
650   /* Create the factorization matrix */
651   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
652   ierr = MatSetSizes(B,A->rmap.n,A->cmap.n,A->rmap.N,A->cmap.N);CHKERRQ(ierr);
653   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
654   ierr = MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);CHKERRQ(ierr);
655   ierr = MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
656 
657   B->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
658   B->ops->getinertia            = MatGetInertia_SBAIJMUMPS;
659   B->factor                     = FACTOR_CHOLESKY;
660   lu                            = (Mat_MUMPS*)B->spptr;
661   lu->sym                       = 2;
662   lu->matstruc                  = DIFFERENT_NONZERO_PATTERN;
663 
664   *F = B;
665   PetscFunctionReturn(0);
666 }
667 
668 #undef __FUNCT__
669 #define __FUNCT__ "MatFactorInfo_MUMPS"
670 PetscErrorCode MatFactorInfo_MUMPS(Mat A,PetscViewer viewer) {
671   Mat_MUMPS      *lu=(Mat_MUMPS*)A->spptr;
672   PetscErrorCode ierr;
673 
674   PetscFunctionBegin;
675   /* check if matrix is mumps type */
676   if (A->ops->solve != MatSolve_MUMPS) PetscFunctionReturn(0);
677 
678   ierr = PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");CHKERRQ(ierr);
679   ierr = PetscViewerASCIIPrintf(viewer,"  SYM (matrix type):                  %d \n",lu->id.sym);CHKERRQ(ierr);
680   ierr = PetscViewerASCIIPrintf(viewer,"  PAR (host participation):           %d \n",lu->id.par);CHKERRQ(ierr);
681   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(1) (output for error):        %d \n",lu->id.ICNTL(1));CHKERRQ(ierr);
682   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(2) (output of diagnostic msg):%d \n",lu->id.ICNTL(2));CHKERRQ(ierr);
683   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(3) (output for global info):  %d \n",lu->id.ICNTL(3));CHKERRQ(ierr);
684   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(4) (level of printing):       %d \n",lu->id.ICNTL(4));CHKERRQ(ierr);
685   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(5) (input mat struct):        %d \n",lu->id.ICNTL(5));CHKERRQ(ierr);
686   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(6) (matrix prescaling):       %d \n",lu->id.ICNTL(6));CHKERRQ(ierr);
687   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(7) (matrix ordering):         %d \n",lu->id.ICNTL(7));CHKERRQ(ierr);
688   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(8) (scalling strategy):       %d \n",lu->id.ICNTL(8));CHKERRQ(ierr);
689   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(9) (A/A^T x=b is solved):     %d \n",lu->id.ICNTL(9));CHKERRQ(ierr);
690   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(10) (max num of refinements): %d \n",lu->id.ICNTL(10));CHKERRQ(ierr);
691   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(11) (error analysis):         %d \n",lu->id.ICNTL(11));CHKERRQ(ierr);
692   if (!lu->myid && lu->id.ICNTL(11)>0) {
693     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(4) (inf norm of input mat):        %g\n",lu->id.RINFOG(4));CHKERRQ(ierr);
694     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(5) (inf norm of solution):         %g\n",lu->id.RINFOG(5));CHKERRQ(ierr);
695     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(6) (inf norm of residual):         %g\n",lu->id.RINFOG(6));CHKERRQ(ierr);
696     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",lu->id.RINFOG(7),lu->id.RINFOG(8));CHKERRQ(ierr);
697     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(9) (error estimate):               %g \n",lu->id.RINFOG(9));CHKERRQ(ierr);
698     ierr = PetscPrintf(PETSC_COMM_SELF,"        RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",lu->id.RINFOG(10),lu->id.RINFOG(11));CHKERRQ(ierr);
699 
700   }
701   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(12) (efficiency control):                         %d \n",lu->id.ICNTL(12));CHKERRQ(ierr);
702   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(13) (efficiency control):                         %d \n",lu->id.ICNTL(13));CHKERRQ(ierr);
703   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(14) (percentage of estimated workspace increase): %d \n",lu->id.ICNTL(14));CHKERRQ(ierr);
704   /* ICNTL(15-17) not used */
705   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(18) (input mat struct):                           %d \n",lu->id.ICNTL(18));CHKERRQ(ierr);
706   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(19) (Shur complement info):                       %d \n",lu->id.ICNTL(19));CHKERRQ(ierr);
707   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(20) (rhs sparse pattern):                         %d \n",lu->id.ICNTL(20));CHKERRQ(ierr);
708   ierr = PetscViewerASCIIPrintf(viewer,"  ICNTL(21) (solution struct):                            %d \n",lu->id.ICNTL(21));CHKERRQ(ierr);
709 
710   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(1) (relative pivoting threshold):      %g \n",lu->id.CNTL(1));CHKERRQ(ierr);
711   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(2) (stopping criterion of refinement): %g \n",lu->id.CNTL(2));CHKERRQ(ierr);
712   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(3) (absolute pivoting threshold):      %g \n",lu->id.CNTL(3));CHKERRQ(ierr);
713   ierr = PetscViewerASCIIPrintf(viewer,"  CNTL(4) (value of static pivoting):         %g \n",lu->id.CNTL(4));CHKERRQ(ierr);
714 
715   /* infomation local to each processor */
716   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      RINFO(1) (local estimated flops for the elimination after analysis): \n");CHKERRQ(ierr);}
717   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d] %g \n",lu->myid,lu->id.RINFO(1));CHKERRQ(ierr);
718   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
719   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      RINFO(2) (local estimated flops for the assembly after factorization): \n");CHKERRQ(ierr);}
720   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d]  %g \n",lu->myid,lu->id.RINFO(2));CHKERRQ(ierr);
721   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
722   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      RINFO(3) (local estimated flops for the elimination after factorization): \n");CHKERRQ(ierr);}
723   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d]  %g \n",lu->myid,lu->id.RINFO(3));CHKERRQ(ierr);
724   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
725   /*
726   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      INFO(2) (info about error or warning ): \n");CHKERRQ(ierr);}
727   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d] %d \n",lu->myid,lu->id.INFO(2));CHKERRQ(ierr);
728   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
729   */
730 
731   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      INFO(15) (estimated size of (in MB) MUMPS internal data for running numerical factorization): \n");CHKERRQ(ierr);}
732   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d] %d \n",lu->myid,lu->id.INFO(15));CHKERRQ(ierr);
733   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
734 
735   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      INFO(16) (size of (in MB) MUMPS internal data used during numerical factorization): \n");CHKERRQ(ierr);}
736   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d] %d \n",lu->myid,lu->id.INFO(16));CHKERRQ(ierr);
737   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
738 
739   if (!lu->myid) {ierr = PetscPrintf(PETSC_COMM_SELF, "      INFO(23) (num of pivots eliminated on this processor after factorization): \n");CHKERRQ(ierr);}
740   ierr = PetscSynchronizedPrintf(((PetscObject)A)->comm,"             [%d] %d \n",lu->myid,lu->id.INFO(23));CHKERRQ(ierr);
741   ierr = PetscSynchronizedFlush(((PetscObject)A)->comm);
742 
743   if (!lu->myid){ /* information from the host */
744     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",lu->id.RINFOG(1));CHKERRQ(ierr);
745     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",lu->id.RINFOG(2));CHKERRQ(ierr);
746     ierr = PetscViewerASCIIPrintf(viewer,"  RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",lu->id.RINFOG(3));CHKERRQ(ierr);
747 
748     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(3));CHKERRQ(ierr);
749     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(4));CHKERRQ(ierr);
750     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(5) (estimated maximum front size in the complete tree): %d \n",lu->id.INFOG(5));CHKERRQ(ierr);
751     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(6) (number of nodes in the complete tree): %d \n",lu->id.INFOG(6));CHKERRQ(ierr);
752     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(7) (ordering option effectively uese after analysis): %d \n",lu->id.INFOG(7));CHKERRQ(ierr);
753     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",lu->id.INFOG(8));CHKERRQ(ierr);
754     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",lu->id.INFOG(9));CHKERRQ(ierr);
755     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(10) (total integer space store the matrix factors after factorization): %d \n",lu->id.INFOG(10));CHKERRQ(ierr);
756     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(11) (order of largest frontal matrix after factorization): %d \n",lu->id.INFOG(11));CHKERRQ(ierr);
757     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(12) (number of off-diagonal pivots): %d \n",lu->id.INFOG(12));CHKERRQ(ierr);
758     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(13) (number of delayed pivots after factorization): %d \n",lu->id.INFOG(13));CHKERRQ(ierr);
759     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(14) (number of memory compress after factorization): %d \n",lu->id.INFOG(14));CHKERRQ(ierr);
760     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(15) (number of steps of iterative refinement after solution): %d \n",lu->id.INFOG(15));CHKERRQ(ierr);
761     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",lu->id.INFOG(16));CHKERRQ(ierr);
762     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",lu->id.INFOG(17));CHKERRQ(ierr);
763     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",lu->id.INFOG(18));CHKERRQ(ierr);
764     ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",lu->id.INFOG(19));CHKERRQ(ierr);
765      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(20) (estimated number of entries in the factors): %d \n",lu->id.INFOG(20));CHKERRQ(ierr);
766      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",lu->id.INFOG(21));CHKERRQ(ierr);
767      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",lu->id.INFOG(22));CHKERRQ(ierr);
768      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",lu->id.INFOG(23));CHKERRQ(ierr);
769      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",lu->id.INFOG(24));CHKERRQ(ierr);
770      ierr = PetscViewerASCIIPrintf(viewer,"  INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",lu->id.INFOG(25));CHKERRQ(ierr);
771   }
772 
773   PetscFunctionReturn(0);
774 }
775 
776 #undef __FUNCT__
777 #define __FUNCT__ "MatView_MUMPS"
778 PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer) {
779   PetscErrorCode    ierr;
780   PetscTruth        iascii;
781   PetscViewerFormat format;
782   Mat_MUMPS         *mumps=(Mat_MUMPS*)(A->spptr);
783 
784   PetscFunctionBegin;
785   ierr = (*mumps->MatView)(A,viewer);CHKERRQ(ierr);
786 
787   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
788   if (iascii) {
789     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
790     if (format == PETSC_VIEWER_ASCII_INFO){
791       ierr = MatFactorInfo_MUMPS(A,viewer);CHKERRQ(ierr);
792     }
793   }
794   PetscFunctionReturn(0);
795 }
796 
797 #undef __FUNCT__
798 #define __FUNCT__ "MatAssemblyEnd_AIJMUMPS"
799 PetscErrorCode MatAssemblyEnd_AIJMUMPS(Mat A,MatAssemblyType mode) {
800   PetscErrorCode ierr;
801   Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
802 
803   PetscFunctionBegin;
804   ierr = (*mumps->MatAssemblyEnd)(A,mode);CHKERRQ(ierr);
805 
806   mumps->MatLUFactorSymbolic       = A->ops->lufactorsymbolic;
807   mumps->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
808   A->ops->lufactorsymbolic         = MatLUFactorSymbolic_AIJMUMPS;
809   PetscFunctionReturn(0);
810 }
811 
812 EXTERN_C_BEGIN
813 #undef __FUNCT__
814 #define __FUNCT__ "MatConvert_AIJ_AIJMUMPS"
815 PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_AIJ_AIJMUMPS(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
816 {
817   PetscErrorCode ierr;
818   PetscMPIInt    size;
819   MPI_Comm       comm;
820   Mat            B=*newmat;
821   Mat_MUMPS      *mumps;
822 
823   PetscFunctionBegin;
824   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
825   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
826 
827   if (reuse == MAT_INITIAL_MATRIX) {
828     ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
829     /* A may have special container that is not duplicated,
830        e.g., A is obtainted from MatMatMult(,&A). Save B->ops instead */
831     mumps->MatDuplicate              = B->ops->duplicate;
832     mumps->MatView                   = B->ops->view;
833     mumps->MatAssemblyEnd            = B->ops->assemblyend;
834     mumps->MatLUFactorSymbolic       = B->ops->lufactorsymbolic;
835     mumps->MatCholeskyFactorSymbolic = B->ops->choleskyfactorsymbolic;
836     mumps->MatDestroy                = B->ops->destroy;
837   } else {
838     mumps->MatDuplicate              = A->ops->duplicate;
839     mumps->MatView                   = A->ops->view;
840     mumps->MatAssemblyEnd            = A->ops->assemblyend;
841     mumps->MatLUFactorSymbolic       = A->ops->lufactorsymbolic;
842     mumps->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
843     mumps->MatDestroy                = A->ops->destroy;
844   }
845   mumps->specialdestroy            = MatDestroy_AIJMUMPS;
846   mumps->CleanUpMUMPS              = PETSC_FALSE;
847   mumps->isAIJ                     = PETSC_TRUE;
848   mumps->scat_rhs                  = PETSC_NULL;
849   mumps->scat_sol                  = PETSC_NULL;
850   mumps->nSolve                    = 0;
851 
852   B->spptr                         = (void*)mumps;
853   B->ops->duplicate                = MatDuplicate_MUMPS;
854   B->ops->view                     = MatView_MUMPS;
855   B->ops->assemblyend              = MatAssemblyEnd_AIJMUMPS;
856   B->ops->lufactorsymbolic         = MatLUFactorSymbolic_AIJMUMPS;
857   B->ops->destroy                  = MatDestroy_MUMPS;
858 
859   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
860   if (size == 1) {
861     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqaij_aijmumps_C",
862                                              "MatConvert_AIJ_AIJMUMPS",MatConvert_AIJ_AIJMUMPS);CHKERRQ(ierr);
863     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_aijmumps_seqaij_C",
864                                              "MatConvert_MUMPS_Base",MatConvert_MUMPS_Base);CHKERRQ(ierr);
865   } else {
866     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_aijmumps_C",
867                                              "MatConvert_AIJ_AIJMUMPS",MatConvert_AIJ_AIJMUMPS);CHKERRQ(ierr);
868     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_aijmumps_mpiaij_C",
869                                              "MatConvert_MUMPS_Base",MatConvert_MUMPS_Base);CHKERRQ(ierr);
870   }
871 
872   ierr = PetscInfo(A,"Using MUMPS for LU factorization and solves.\n");CHKERRQ(ierr);
873   ierr = PetscObjectChangeTypeName((PetscObject)B,newtype);CHKERRQ(ierr);
874   *newmat = B;
875   PetscFunctionReturn(0);
876 }
877 EXTERN_C_END
878 
879 /*MC
880   MATAIJMUMPS - MATAIJMUMPS = "aijmumps" - A matrix type providing direct solvers (LU) for distributed
881   and sequential matrices via the external package MUMPS.
882 
883   If MUMPS is installed (see the manual for instructions
884   on how to declare the existence of external packages),
885   a matrix type can be constructed which invokes MUMPS solvers.
  After calling MatCreate(...,A), simply call MatSetType(A,MATAIJMUMPS), then
  optionally call MatSeqAIJSetPreallocation() or MatMPIAIJSetPreallocation(). DO NOT
  call MatCreateSeqAIJ/MPIAIJ() directly, or the preallocation information will be LOST!
889 
890   If created with a single process communicator, this matrix type inherits from MATSEQAIJ.
891   Otherwise, this matrix type inherits from MATMPIAIJ.  Hence for single process communicators,
892   MatSeqAIJSetPreallocation() is supported, and similarly MatMPIAIJSetPreallocation() is supported
893   for communicators controlling multiple processes.  It is recommended that you call both of
894   the above preallocation routines for simplicity.  One can also call MatConvert() for an inplace
895   conversion to or from the MATSEQAIJ or MATMPIAIJ type (depending on the communicator size)
896   without data copy AFTER the matrix values are set.
897 
898   Options Database Keys:
899 + -mat_type aijmumps - sets the matrix type to "aijmumps" during a call to MatSetFromOptions()
900 . -mat_mumps_sym <0,1,2> - 0 the matrix is unsymmetric, 1 symmetric positive definite, 2 symmetric
901 . -mat_mumps_icntl_4 <0,1,2,3,4> - print level
902 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
903 . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
904 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
905 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
906 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
907 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
908 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
909 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
910 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
911 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
912 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
913 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
914 
915   Level: beginner
916 
917 .seealso: MATSBAIJMUMPS
918 M*/
919 
920 EXTERN_C_BEGIN
921 #undef __FUNCT__
922 #define __FUNCT__ "MatCreate_AIJMUMPS"
923 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_AIJMUMPS(Mat A)
924 {
925   PetscErrorCode ierr;
926   PetscMPIInt    size;
927 
928   PetscFunctionBegin;
929   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
930   ierr = MatSetType(A,MATAIJ);CHKERRQ(ierr);
931   ierr = MatConvert_AIJ_AIJMUMPS(A,MATAIJMUMPS,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 EXTERN_C_END
935 
936 #undef __FUNCT__
937 #define __FUNCT__ "MatAssemblyEnd_SBAIJMUMPS"
938 PetscErrorCode MatAssemblyEnd_SBAIJMUMPS(Mat A,MatAssemblyType mode)
939 {
940   PetscErrorCode ierr;
941   Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
942 
943   PetscFunctionBegin;
944   ierr = (*mumps->MatAssemblyEnd)(A,mode);CHKERRQ(ierr);
945   mumps->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
946   A->ops->choleskyfactorsymbolic   = MatCholeskyFactorSymbolic_SBAIJMUMPS;
947   PetscFunctionReturn(0);
948 }
949 
950 EXTERN_C_BEGIN
951 #undef __FUNCT__
952 #define __FUNCT__ "MatMPISBAIJSetPreallocation_MPISBAIJMUMPS"
953 PetscErrorCode PETSCMAT_DLLEXPORT MatMPISBAIJSetPreallocation_MPISBAIJMUMPS(Mat B,PetscInt bs,PetscInt d_nz,PetscInt *d_nnz,PetscInt o_nz,PetscInt *o_nnz)
954 {
955   Mat       A;
956   Mat_MUMPS *mumps=(Mat_MUMPS*)B->spptr;
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   /*
961     After performing the MPISBAIJ Preallocation, we need to convert the local diagonal block matrix
962     into MUMPS type so that the block jacobi preconditioner (for example) can use MUMPS.  I would
963     like this to be done in the MatCreate routine, but the creation of this inner matrix requires
964     block size info so that PETSc can determine the local size properly.  The block size info is set
965     in the preallocation routine.
966   */
967   ierr = (*mumps->MatPreallocate)(B,bs,d_nz,d_nnz,o_nz,o_nnz);
968   A    = ((Mat_MPISBAIJ *)B->data)->A;
969   ierr = MatConvert_SBAIJ_SBAIJMUMPS(A,MATSBAIJMUMPS,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
970   PetscFunctionReturn(0);
971 }
972 EXTERN_C_END
973 
974 EXTERN_C_BEGIN
975 #undef __FUNCT__
976 #define __FUNCT__ "MatConvert_SBAIJ_SBAIJMUMPS"
977 PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_SBAIJ_SBAIJMUMPS(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
978 {
979   PetscErrorCode ierr;
980   PetscMPIInt    size;
981   MPI_Comm       comm;
982   Mat            B=*newmat;
983   Mat_MUMPS      *mumps;
984   void           (*f)(void);
985 
986   PetscFunctionBegin;
987   if (reuse == MAT_INITIAL_MATRIX) {
988     ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
989   }
990 
991   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
992   ierr = PetscNewLog(B,Mat_MUMPS,&mumps);CHKERRQ(ierr);
993 
994   mumps->MatDuplicate              = A->ops->duplicate;
995   mumps->MatView                   = A->ops->view;
996   mumps->MatAssemblyEnd            = A->ops->assemblyend;
997   mumps->MatLUFactorSymbolic       = A->ops->lufactorsymbolic;
998   mumps->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
999   mumps->MatDestroy                = A->ops->destroy;
1000   mumps->specialdestroy            = MatDestroy_SBAIJMUMPS;
1001   mumps->CleanUpMUMPS              = PETSC_FALSE;
1002   mumps->isAIJ                     = PETSC_FALSE;
1003   mumps->scat_rhs                  = PETSC_NULL;
1004   mumps->scat_sol                  = PETSC_NULL;
1005   mumps->nSolve                    = 0;
1006 
1007   B->spptr                         = (void*)mumps;
1008   B->ops->duplicate                = MatDuplicate_MUMPS;
1009   B->ops->view                     = MatView_MUMPS;
1010   B->ops->assemblyend              = MatAssemblyEnd_SBAIJMUMPS;
1011   B->ops->choleskyfactorsymbolic   = MatCholeskyFactorSymbolic_SBAIJMUMPS;
1012   B->ops->destroy                  = MatDestroy_MUMPS;
1013 
1014   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
1015   if (size == 1) {
1016     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqsbaij_sbaijmumps_C",
1017                                              "MatConvert_SBAIJ_SBAIJMUMPS",MatConvert_SBAIJ_SBAIJMUMPS);CHKERRQ(ierr);
1018     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_sbaijmumps_seqsbaij_C",
1019                                              "MatConvert_MUMPS_Base",MatConvert_MUMPS_Base);CHKERRQ(ierr);
1020   } else {
1021   /* I really don't like needing to know the tag: MatMPISBAIJSetPreallocation_C */
1022     ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",(PetscVoidStarFunction)&f);CHKERRQ(ierr);
1023     if (f) { /* This case should always be true when this routine is called */
1024       mumps->MatPreallocate = (PetscErrorCode (*)(Mat,int,int,int*,int,int*))f;
1025       ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPISBAIJSetPreallocation_C",
1026                                                "MatMPISBAIJSetPreallocation_MPISBAIJMUMPS",
1027                                                MatMPISBAIJSetPreallocation_MPISBAIJMUMPS);CHKERRQ(ierr);
1028     }
1029     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpisbaij_sbaijmumps_C",
1030                                              "MatConvert_SBAIJ_SBAIJMUMPS",MatConvert_SBAIJ_SBAIJMUMPS);CHKERRQ(ierr);
1031     ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_sbaijmumps_mpisbaij_C",
1032                                              "MatConvert_MUMPS_Base",MatConvert_MUMPS_Base);CHKERRQ(ierr);
1033   }
1034 
1035   ierr = PetscInfo(A,"Using MUMPS for Cholesky factorization and solves.\n");CHKERRQ(ierr);
1036   ierr = PetscObjectChangeTypeName((PetscObject)B,newtype);CHKERRQ(ierr);
1037   *newmat = B;
1038   PetscFunctionReturn(0);
1039 }
1040 EXTERN_C_END
1041 
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MUMPS"
/* Duplicate a MUMPS-wrapped matrix: calls the saved base-type duplicate,
   then copies the Mat_MUMPS context bit-for-bit into the new matrix's spptr.
   NOTE(review): the PetscMemcpy is a shallow copy, so the duplicate shares
   id/scat_rhs/scat_sol/comm_mumps handles with the original -- presumably
   only safe while CleanUpMUMPS is PETSC_FALSE; confirm against
   MatDestroy_MUMPS to rule out double-release of MUMPS resources. */
PetscErrorCode MatDuplicate_MUMPS(Mat A, MatDuplicateOption op, Mat *M) {
  PetscErrorCode ierr;
  Mat_MUMPS   *lu=(Mat_MUMPS *)A->spptr;

  PetscFunctionBegin;
  /* base-type duplicate allocates (*M)->spptr sized for Mat_MUMPS */
  ierr = (*lu->MatDuplicate)(A,op,M);CHKERRQ(ierr);
  ierr = PetscMemcpy((*M)->spptr,lu,sizeof(Mat_MUMPS));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1053 
1054 /*MC
1055   MATSBAIJMUMPS - MATSBAIJMUMPS = "sbaijmumps" - A symmetric matrix type providing direct solvers (Cholesky) for
1056   distributed and sequential matrices via the external package MUMPS.
1057 
1058   If MUMPS is installed (see the manual for instructions
1059   on how to declare the existence of external packages),
1060   a matrix type can be constructed which invokes MUMPS solvers.
1061   After calling MatCreate(...,A), simply call MatSetType(A,MATSBAIJMUMPS), then
1062   optionally call MatSeqSBAIJSetPreallocation() or MatMPISBAIJSetPreallocation() DO NOT
1063   call MatCreateSeqSBAIJ/MPISBAIJ() directly or the preallocation information will be LOST!
1064 
1065   If created with a single process communicator, this matrix type inherits from MATSEQSBAIJ.
1066   Otherwise, this matrix type inherits from MATMPISBAIJ.  Hence for single process communicators,
1067   MatSeqSBAIJSetPreallocation() is supported, and similarly MatMPISBAIJSetPreallocation() is supported
1068   for communicators controlling multiple processes.  It is recommended that you call both of
1069   the above preallocation routines for simplicity.  One can also call MatConvert() for an inplace
1070   conversion to or from the MATSEQSBAIJ or MATMPISBAIJ type (depending on the communicator size)
1071   without data copy AFTER the matrix values have been set.
1072 
1073   Options Database Keys:
1074 + -mat_type sbaijmumps - sets the matrix type to "sbaijmumps" during a call to MatSetFromOptions()
1075 . -mat_mumps_sym <0,1,2> - 0 the matrix is unsymmetric, 1 symmetric positive definite, 2 symmetric
1076 . -mat_mumps_icntl_4 <0,...,4> - print level
1077 . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
1078 . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
1079 . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
1080 . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
1081 . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
1082 . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
1083 . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
1084 . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
1085 . -mat_mumps_icntl_15 <n> - efficiency control (see MUMPS User's Guide)
1086 . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
1087 . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
1088 - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
1089 
1090   Level: beginner
1091 
1092 .seealso: MATAIJMUMPS
1093 M*/
1094 
1095 EXTERN_C_BEGIN
1096 #undef __FUNCT__
1097 #define __FUNCT__ "MatCreate_SBAIJMUMPS"
1098 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_SBAIJMUMPS(Mat A)
1099 {
1100   PetscErrorCode ierr;
1101   PetscMPIInt    size;
1102 
1103   PetscFunctionBegin;
1104   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);CHKERRQ(ierr);
1105   ierr = MatSetType(A,MATSBAIJ);CHKERRQ(ierr);
1106   ierr = MatConvert_SBAIJ_SBAIJMUMPS(A,MATSBAIJMUMPS,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
1107   PetscFunctionReturn(0);
1108 }
1109 EXTERN_C_END
1110