xref: /petsc/src/mat/impls/aij/mpi/pastix/pastix.c (revision 2d30e087755efd99e28fdfe792ffbeb2ee1ea928)
1 /*
2  Provides an interface to the PaStiX sparse solver
3  */
4 #include <../src/mat/impls/aij/seq/aij.h>
5 #include <../src/mat/impls/aij/mpi/mpiaij.h>
6 #include <../src/mat/impls/sbaij/seq/sbaij.h>
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 
9 #if defined(PETSC_USE_COMPLEX)
10 #define _H_COMPLEX
11 #endif
12 
13 EXTERN_C_BEGIN
14 #include <pastix.h>
15 EXTERN_C_END
16 
17 #if defined(PETSC_USE_COMPLEX)
18 #if defined(PETSC_USE_REAL_SINGLE)
19 #define PASTIX_CALL c_pastix
20 #else
21 #define PASTIX_CALL z_pastix
22 #endif
23 
24 #else /* PETSC_USE_COMPLEX */
25 
26 #if defined(PETSC_USE_REAL_SINGLE)
27 #define PASTIX_CALL s_pastix
28 #else
29 #define PASTIX_CALL d_pastix
30 #endif
31 
32 #endif /* PETSC_USE_COMPLEX */
33 
typedef PetscScalar PastixScalar;

/* Per-factorization context for the PaStiX interface, stored in the factor
   matrix's A->data.  Holds the 1-based CSC copy of the matrix, the
   permutations, the PaStiX parameter arrays, and the scatter machinery used
   to centralize the right-hand side. */
typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;       /* Pastix data storage structure                        */
  MatStructure   matstruc;          /* SAME_/DIFFERENT_NONZERO_PATTERN between refactorizations */
  PetscInt       n;                 /* Number of columns in the matrix                      */
  PetscInt      *colptr;            /* Index of first element of each column in row and val */
  PetscInt      *row;               /* Row of each element of the matrix                    */
  PetscScalar   *val;               /* Value of each element of the matrix                  */
  PetscInt      *perm;              /* Permutation array                                    */
  PetscInt      *invp;              /* Inverse permutation array                            */
  PetscScalar   *rhs;               /* Right-hand-side member                               */
  PetscInt       rhsnbr;            /* Right-hand-side number (must be 1)                   */
  PetscInt       iparm[IPARM_SIZE]; /* Integer parameters                                   */
  double         dparm[DPARM_SIZE]; /* Floating point parameters                            */
  MPI_Comm       pastix_comm;       /* PaStiX MPI communicator                              */
  PetscMPIInt    commRank;          /* MPI rank                                             */
  PetscMPIInt    commSize;          /* MPI communicator size                                */
  PetscBool      CleanUpPastix;     /* Boolean indicating if we call PaStiX clean step      */
  VecScatter     scat_rhs;          /* scatters a distributed rhs into the sequential b_seq */
  VecScatter     scat_sol;          /* scatters the sequential solution back to the mpi x   */
  Vec            b_seq;             /* centralized right-hand-side work vector              */
} Mat_Pastix;
57 
58 extern PetscErrorCode MatDuplicate_Pastix(Mat, MatDuplicateOption, Mat *);
59 
60 /*
61    convert Petsc seqaij matrix to CSC: colptr[n], row[nz], val[nz]
62 
63   input:
64     A       - matrix in seqaij or mpisbaij (bs=1) format
65     valOnly - FALSE: spaces are allocated and values are set for the CSC
66               TRUE:  Only fill values
67   output:
68     n       - Size of the matrix
69     colptr  - Index of first element of each column in row and val
70     row     - Row of each element of the matrix
71     values  - Value of each element of the matrix
72  */
73 PetscErrorCode MatConvertToCSC(Mat A, PetscBool valOnly, PetscInt *n, PetscInt **colptr, PetscInt **row, PetscScalar **values) {
74   Mat_SeqAIJ  *aa      = (Mat_SeqAIJ *)A->data;
75   PetscInt    *rowptr  = aa->i;
76   PetscInt    *col     = aa->j;
77   PetscScalar *rvalues = aa->a;
78   PetscInt     m       = A->rmap->N;
79   PetscInt     nnz;
80   PetscInt     i, j, k;
81   PetscInt     base = 1;
82   PetscInt     idx;
83   PetscInt     colidx;
84   PetscInt    *colcount;
85   PetscBool    isSBAIJ;
86   PetscBool    isSeqSBAIJ;
87   PetscBool    isMpiSBAIJ;
88   PetscBool    isSym;
89 
90   PetscFunctionBegin;
91   PetscCall(MatIsSymmetric(A, 0.0, &isSym));
92   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSBAIJ, &isSBAIJ));
93   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
94   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPISBAIJ, &isMpiSBAIJ));
95 
96   *n = A->cmap->N;
97 
98   /* PaStiX only needs triangular matrix if matrix is symmetric
99    */
100   if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n) / 2 + *n;
101   else nnz = aa->nz;
102 
103   if (!valOnly) {
104     PetscCall(PetscMalloc1((*n) + 1, colptr));
105     PetscCall(PetscMalloc1(nnz, row));
106     PetscCall(PetscMalloc1(nnz, values));
107 
108     if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
109       PetscCall(PetscArraycpy(*colptr, rowptr, (*n) + 1));
110       for (i = 0; i < *n + 1; i++) (*colptr)[i] += base;
111       PetscCall(PetscArraycpy(*row, col, nnz));
112       for (i = 0; i < nnz; i++) (*row)[i] += base;
113       PetscCall(PetscArraycpy(*values, rvalues, nnz));
114     } else {
115       PetscCall(PetscMalloc1(*n, &colcount));
116 
117       for (i = 0; i < m; i++) colcount[i] = 0;
118       /* Fill-in colptr */
119       for (i = 0; i < m; i++) {
120         for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
121           if (!isSym || col[j] <= i) colcount[col[j]]++;
122         }
123       }
124 
125       (*colptr)[0] = base;
126       for (j = 0; j < *n; j++) {
127         (*colptr)[j + 1] = (*colptr)[j] + colcount[j];
128         /* in next loop we fill starting from (*colptr)[colidx] - base */
129         colcount[j]      = -base;
130       }
131 
132       /* Fill-in rows and values */
133       for (i = 0; i < m; i++) {
134         for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
135           if (!isSym || col[j] <= i) {
136             colidx         = col[j];
137             idx            = (*colptr)[colidx] + colcount[colidx];
138             (*row)[idx]    = i + base;
139             (*values)[idx] = rvalues[j];
140             colcount[colidx]++;
141           }
142         }
143       }
144       PetscCall(PetscFree(colcount));
145     }
146   } else {
147     /* Fill-in only values */
148     for (i = 0; i < m; i++) {
149       for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
150         colidx = col[j];
151         if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
152           /* look for the value to fill */
153           for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
154             if (((*row)[k] - base) == i) {
155               (*values)[k] = rvalues[j];
156               break;
157             }
158           }
159           /* data structure of sparse matrix has changed */
160           PetscCheck(k != (*colptr)[colidx + 1] - base, PETSC_COMM_SELF, PETSC_ERR_PLIB, "overflow on k %" PetscInt_FMT, k);
161         }
162       }
163     }
164   }
165   PetscFunctionReturn(0);
166 }
167 
/*
  Call clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix, permutations, scatters and the PaStiX communicator.
 */
PetscErrorCode MatDestroy_Pastix(Mat A) {
  Mat_Pastix *lu = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  if (lu->CleanUpPastix) {
    /* Terminate instance, deallocate memories */
    PetscCall(VecScatterDestroy(&lu->scat_rhs));
    PetscCall(VecDestroy(&lu->b_seq));
    PetscCall(VecScatterDestroy(&lu->scat_sol));

    /* Run only the CLEAN task so PaStiX releases its internal storage */
    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in destroy: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
    /* CSC copy and permutations were allocated in MatConvertToCSC() and
       MatFactorNumeric_PaStiX() */
    PetscCall(PetscFree(lu->colptr));
    PetscCall(PetscFree(lu->row));
    PetscCall(PetscFree(lu->val));
    PetscCall(PetscFree(lu->perm));
    PetscCall(PetscFree(lu->invp));
    PetscCallMPI(MPI_Comm_free(&(lu->pastix_comm)));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(0);
}
198 
/*
  Gather the right-hand-side onto one process (PaStiX wants a centralized rhs),
  call the PaStiX solve + refinement step, then scatter the solution back.
 */
PetscErrorCode MatSolve_PaStiX(Mat A, Vec b, Vec x) {
  Mat_Pastix  *lu = (Mat_Pastix *)A->data;
  PetscScalar *array;
  Vec          x_seq;

  PetscFunctionBegin;
  lu->rhsnbr = 1; /* single right-hand side */
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports centralized rhs. Scatter b into a sequential rhs vector */
    PetscCall(VecScatterBegin(lu->scat_rhs, b, x_seq, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(lu->scat_rhs, b, x_seq, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(x_seq, &array));
  } else { /* size == 1 */
    /* uniprocessor: solve in place inside x's array */
    PetscCall(VecCopy(b, x));
    PetscCall(VecGetArray(x, &array));
  }
  lu->rhs = array;
  /* NOTE(review): the array is restored here while lu->rhs still points into
     it and is used by PASTIX_CALL below, then restored a second time after
     the call.  This relies on the restored array pointer remaining valid —
     verify against PETSc's Vec array-access rules before reworking. */
  if (lu->commSize == 1) {
    PetscCall(VecRestoreArray(x, &array));
  } else {
    PetscCall(VecRestoreArray(x_seq, &array));
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
  PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    PetscCall(VecRestoreArray(x, &(lu->rhs)));
  } else {
    PetscCall(VecRestoreArray(x_seq, &(lu->rhs)));
  }

  if (lu->commSize > 1) { /* convert PaStiX centralized solution to petsc mpi x */
    PetscCall(VecScatterBegin(lu->scat_sol, x_seq, x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(lu->scat_sol, x_seq, x, INSERT_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(0);
}
249 
/*
  Numeric factorisation using PaStiX solver.

  On the first call for a nonzero pattern (matstruc == DIFFERENT_NONZERO_PATTERN)
  this initializes the PaStiX instance, reads -mat_pastix_* options, builds a
  sequential CSC copy of A and runs ordering + analysis + numeric tasks.
  Later calls refresh the CSC copy and rerun only the numeric task.
 */
PetscErrorCode MatFactorNumeric_PaStiX(Mat F, Mat A, const MatFactorInfo *info) {
  Mat_Pastix *lu = (Mat_Pastix *)(F)->data;
  Mat        *tseq; /* sequential (gathered) copy of A */
  PetscInt    icntl;
  PetscInt    M = A->rmap->N;
  PetscBool   valOnly, flg, isSym;
  IS          is_iden;
  Vec         b;
  IS          isrow;
  PetscBool   isSeqAIJ, isSeqSBAIJ, isMPIAIJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJ, &isMPIAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PASTIX instance */
    PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)A), &(lu->pastix_comm)));
    PetscCallMPI(MPI_Comm_rank(lu->pastix_comm, &lu->commRank));
    PetscCallMPI(MPI_Comm_size(lu->pastix_comm, &lu->commSize));

    /* Set pastix options: run only the INIT task for now */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call to set default pastix options */
    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in MatFactorNumeric: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);

    /* Pick up the -mat_pastix_* run-time options */
    PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "PaStiX Options", "Mat");
    icntl                    = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscCall(PetscOptionsInt("-mat_pastix_verbose", "iparm[IPARM_VERBOSE] : level of printing (0 to 2)", "None", lu->iparm[IPARM_VERBOSE], &icntl, &flg));
    if ((flg && icntl >= 0) || PetscLogPrintInfo) lu->iparm[IPARM_VERBOSE] = icntl;
    icntl = -1;
    PetscCall(PetscOptionsInt("-mat_pastix_threadnbr", "iparm[IPARM_THREAD_NBR] : Number of thread by MPI node", "None", lu->iparm[IPARM_THREAD_NBR], &icntl, &flg));
    if ((flg && icntl > 0)) lu->iparm[IPARM_THREAD_NBR] = icntl;
    PetscOptionsEnd();
    valOnly = PETSC_FALSE; /* first time: allocate the CSC arrays */
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      /* AIJ: rebuild the CSC copy from scratch */
      PetscCall(PetscFree(lu->colptr));
      PetscCall(PetscFree(lu->row));
      PetscCall(PetscFree(lu->val));
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE; /* SBAIJ: keep the pattern, refill values only */
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert mpi A to seq mat A */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
  PetscCall(MatCreateSubMatrices(A, 1, &isrow, &isrow, MAT_INITIAL_MATRIX, &tseq));
  PetscCall(ISDestroy(&isrow));

  PetscCall(MatConvertToCSC(*tseq, valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val));
  PetscCall(MatIsSymmetric(*tseq, 0.0, &isSym));
  PetscCall(MatDestroyMatrices(1, &tseq));

  /* perm/invp persist across refactorizations; allocate once */
  if (!lu->perm) {
    PetscCall(PetscMalloc1(lu->n, &(lu->perm)));
    PetscCall(PetscMalloc1(lu->n, &(lu->invp)));
  }

  if (isSym) {
    /* Symmetric matrix: LDLt factorization (overrides the symbolic setting) */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On unsymmetric matrix, LU */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ) && !lu->b_seq) {
      /* PaStiX only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, A->cmap->N, &lu->b_seq));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, &is_iden));
      PetscCall(MatCreateVecs(A, NULL, &b));
      PetscCall(VecScatterCreate(b, is_iden, lu->b_seq, is_iden, &lu->scat_rhs));
      PetscCall(VecScatterCreate(lu->b_seq, is_iden, b, is_iden, &lu->scat_sol));
      PetscCall(ISDestroy(&is_iden));
      PetscCall(VecDestroy(&b));
    }
    /* Full pipeline: ordering, symbolic analysis and numeric factorization */
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    /* Same nonzero pattern: rerun only the numeric factorization */
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
  }

  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  PetscFunctionReturn(0);
}
362 
/* Note the Petsc r and c permutations are ignored */
/* Symbolic LU "factorization": the real ordering/analysis is deferred to
   MatFactorNumeric_PaStiX() on the first numeric call; here we only record
   the requested factorization kind. */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info) {
  Mat_Pastix *lu = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  /* NOTE(review): API_SYM_YES with LU looks swapped with the Cholesky variant
     (which sets API_SYM_NO); both values are overwritten from MatIsSymmetric()
     in MatFactorNumeric_PaStiX(), so this is currently harmless — confirm
     before changing. */
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_YES;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  PetscFunctionReturn(0);
}
374 
/* Symbolic Cholesky "factorization": like the LU variant, the real
   ordering/analysis happens in MatFactorNumeric_PaStiX(); the r permutation
   is ignored. */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F, Mat A, IS r, const MatFactorInfo *info) {
  Mat_Pastix *lu = (Mat_Pastix *)(F)->data;

  PetscFunctionBegin;
  /* NOTE(review): API_SYM_NO for Cholesky (vs API_SYM_YES for LU above) looks
     swapped; both are overwritten in MatFactorNumeric_PaStiX() based on
     MatIsSymmetric() — confirm before changing. */
  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_NO;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  PetscFunctionReturn(0);
}
385 
386 PetscErrorCode MatView_PaStiX(Mat A, PetscViewer viewer) {
387   PetscBool         iascii;
388   PetscViewerFormat format;
389 
390   PetscFunctionBegin;
391   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
392   if (iascii) {
393     PetscCall(PetscViewerGetFormat(viewer, &format));
394     if (format == PETSC_VIEWER_ASCII_INFO) {
395       Mat_Pastix *lu = (Mat_Pastix *)A->data;
396 
397       PetscCall(PetscViewerASCIIPrintf(viewer, "PaStiX run parameters:\n"));
398       PetscCall(PetscViewerASCIIPrintf(viewer, "  Matrix type :                      %s \n", ((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric")));
399       PetscCall(PetscViewerASCIIPrintf(viewer, "  Level of printing (0,1,2):         %" PetscInt_FMT " \n", lu->iparm[IPARM_VERBOSE]));
400       PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of refinements iterations : %" PetscInt_FMT " \n", lu->iparm[IPARM_NBITER]));
401       PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Error :                        %g \n", lu->dparm[DPARM_RELATIVE_ERROR]));
402     }
403   }
404   PetscFunctionReturn(0);
405 }
406 
407 /*MC
408      MATSOLVERPASTIX  - A solver package providing direct solvers (LU) for distributed
409   and sequential matrices via the external package PaStiX.
410 
  Use ./configure --download-pastix --download-ptscotch  to have PETSc installed with PaStiX
412 
413   Use -pc_type lu -pc_factor_mat_solver_type pastix to use this direct solver
414 
415   Options Database Keys:
416 + -mat_pastix_verbose   <0,1,2>   - print level
417 - -mat_pastix_threadnbr <integer> - Set the thread number by MPI task.
418 
  Notes:
    This only works for matrices with symmetric nonzero structure; if you pass it a matrix with
   nonsymmetric structure, PaStiX (and hence PETSc) returns an error.
422 
423   Level: beginner
424 
425 .seealso: `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
426 M*/
427 
428 PetscErrorCode MatGetInfo_PaStiX(Mat A, MatInfoType flag, MatInfo *info) {
429   Mat_Pastix *lu = (Mat_Pastix *)A->data;
430 
431   PetscFunctionBegin;
432   info->block_size        = 1.0;
433   info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
434   info->nz_used           = lu->iparm[IPARM_NNZEROS];
435   info->nz_unneeded       = 0.0;
436   info->assemblies        = 0.0;
437   info->mallocs           = 0.0;
438   info->memory            = 0.0;
439   info->fill_ratio_given  = 0;
440   info->fill_ratio_needed = 0;
441   info->factor_mallocs    = 0;
442   PetscFunctionReturn(0);
443 }
444 
/* Query callback composed on factor matrices: returns the solver-type name
   ("pastix") for MatFactorGetSolverType(). */
static PetscErrorCode MatFactorGetSolverType_pastix(Mat A, MatSolverType *type) {
  PetscFunctionBegin;
  *type = MATSOLVERPASTIX;
  PetscFunctionReturn(0);
}
450 
451 /*
452     The seq and mpi versions of this function are the same
453 */
454 static PetscErrorCode MatGetFactor_seqaij_pastix(Mat A, MatFactorType ftype, Mat *F) {
455   Mat         B;
456   Mat_Pastix *pastix;
457 
458   PetscFunctionBegin;
459   PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
460   /* Create the factorization matrix */
461   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
462   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
463   PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
464   PetscCall(MatSetUp(B));
465 
466   B->trivialsymbolic       = PETSC_TRUE;
467   B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
468   B->ops->view             = MatView_PaStiX;
469   B->ops->getinfo          = MatGetInfo_PaStiX;
470 
471   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));
472 
473   B->factortype = MAT_FACTOR_LU;
474 
475   /* set solvertype */
476   PetscCall(PetscFree(B->solvertype));
477   PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));
478 
479   PetscCall(PetscNewLog(B, &pastix));
480 
481   pastix->CleanUpPastix = PETSC_FALSE;
482   pastix->scat_rhs      = NULL;
483   pastix->scat_sol      = NULL;
484   B->ops->getinfo       = MatGetInfo_External;
485   B->ops->destroy       = MatDestroy_Pastix;
486   B->data               = (void *)pastix;
487 
488   *F = B;
489   PetscFunctionReturn(0);
490 }
491 
492 static PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A, MatFactorType ftype, Mat *F) {
493   Mat         B;
494   Mat_Pastix *pastix;
495 
496   PetscFunctionBegin;
497   PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
498   /* Create the factorization matrix */
499   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
500   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
501   PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
502   PetscCall(MatSetUp(B));
503 
504   B->trivialsymbolic       = PETSC_TRUE;
505   B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
506   B->ops->view             = MatView_PaStiX;
507   B->ops->getinfo          = MatGetInfo_PaStiX;
508   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));
509 
510   B->factortype = MAT_FACTOR_LU;
511 
512   /* set solvertype */
513   PetscCall(PetscFree(B->solvertype));
514   PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));
515 
516   PetscCall(PetscNewLog(B, &pastix));
517 
518   pastix->CleanUpPastix = PETSC_FALSE;
519   pastix->scat_rhs      = NULL;
520   pastix->scat_sol      = NULL;
521   B->ops->getinfo       = MatGetInfo_External;
522   B->ops->destroy       = MatDestroy_Pastix;
523   B->data               = (void *)pastix;
524 
525   *F = B;
526   PetscFunctionReturn(0);
527 }
528 
529 static PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A, MatFactorType ftype, Mat *F) {
530   Mat         B;
531   Mat_Pastix *pastix;
532 
533   PetscFunctionBegin;
534   PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
535   /* Create the factorization matrix */
536   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
537   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
538   PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
539   PetscCall(MatSetUp(B));
540 
541   B->trivialsymbolic             = PETSC_TRUE;
542   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
543   B->ops->view                   = MatView_PaStiX;
544   B->ops->getinfo                = MatGetInfo_PaStiX;
545   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));
546 
547   B->factortype = MAT_FACTOR_CHOLESKY;
548 
549   /* set solvertype */
550   PetscCall(PetscFree(B->solvertype));
551   PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));
552 
553   PetscCall(PetscNewLog(B, &pastix));
554 
555   pastix->CleanUpPastix = PETSC_FALSE;
556   pastix->scat_rhs      = NULL;
557   pastix->scat_sol      = NULL;
558   B->ops->getinfo       = MatGetInfo_External;
559   B->ops->destroy       = MatDestroy_Pastix;
560   B->data               = (void *)pastix;
561   *F                    = B;
562   PetscFunctionReturn(0);
563 }
564 
565 static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A, MatFactorType ftype, Mat *F) {
566   Mat         B;
567   Mat_Pastix *pastix;
568 
569   PetscFunctionBegin;
570   PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
571 
572   /* Create the factorization matrix */
573   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
574   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
575   PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
576   PetscCall(MatSetUp(B));
577 
578   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
579   B->ops->view                   = MatView_PaStiX;
580   B->ops->getinfo                = MatGetInfo_PaStiX;
581   B->ops->destroy                = MatDestroy_Pastix;
582   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));
583 
584   B->factortype = MAT_FACTOR_CHOLESKY;
585 
586   /* set solvertype */
587   PetscCall(PetscFree(B->solvertype));
588   PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));
589 
590   PetscCall(PetscNewLog(B, &pastix));
591 
592   pastix->CleanUpPastix = PETSC_FALSE;
593   pastix->scat_rhs      = NULL;
594   pastix->scat_sol      = NULL;
595   B->data               = (void *)pastix;
596 
597   *F = B;
598   PetscFunctionReturn(0);
599 }
600 
601 PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Pastix(void) {
602   PetscFunctionBegin;
603   PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_pastix));
604   PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_pastix));
605   PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPISBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_mpisbaij_pastix));
606   PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_seqsbaij_pastix));
607   PetscFunctionReturn(0);
608 }
609