xref: /petsc/src/mat/impls/aij/mpi/pastix/pastix.c (revision cf1aed2ce99d23e50336629af3ca8cf096900abb)
1 /*
2  Provides an interface to the PaStiX sparse solver
3  */
4 #include <../src/mat/impls/aij/seq/aij.h>
5 #include <../src/mat/impls/aij/mpi/mpiaij.h>
6 #include <../src/mat/impls/sbaij/seq/sbaij.h>
7 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8 
9 #if defined(PETSC_USE_COMPLEX) && defined(__cplusplus)
10 #define _H_COMPLEX
11 #endif
12 
13 EXTERN_C_BEGIN
14 #include <pastix.h>
15 EXTERN_C_END
16 
17 #if defined(PETSC_USE_COMPLEX)
18 #if defined(PETSC_USE_REAL_SINGLE)
19 #define PASTIX_CALL c_pastix
20 #define PASTIX_CHECKMATRIX c_pastix_checkMatrix
21 #define PastixScalar COMPLEX
22 #else
23 #define PASTIX_CALL z_pastix
24 #define PASTIX_CHECKMATRIX z_pastix_checkMatrix
25 #define PastixScalar DCOMPLEX
26 #endif
27 
28 #else /* PETSC_USE_COMPLEX */
29 
30 #if defined(PETSC_USE_REAL_SINGLE)
31 #define PASTIX_CALL s_pastix
32 #define PASTIX_CHECKMATRIX s_pastix_checkMatrix
33 #define PastixScalar float
34 #else
35 #define PASTIX_CALL d_pastix
36 #define PASTIX_CHECKMATRIX d_pastix_checkMatrix
37 #define PastixScalar double
38 #endif
39 
40 #endif /* PETSC_USE_COMPLEX */
41 
typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;    /* PaStiX internal data storage structure               */
  MatStructure  matstruc;        /* DIFFERENT_/SAME_NONZERO_PATTERN between factor calls */
  PetscInt      n;               /* Number of columns in the matrix                      */
  PetscInt      *colptr;         /* Index of first element of each column in row and val */
  PetscInt      *row;            /* Row of each element of the matrix                    */
  PetscScalar   *val;            /* Value of each element of the matrix                  */
  PetscInt      *perm;           /* Permutation array                                    */
  PetscInt      *invp;           /* Inverse permutation array                            */
  PetscScalar   *rhs;            /* Right-hand-side member (solved in place by PaStiX)   */
  PetscInt      rhsnbr;          /* Right-hand-side count (must be 1)                    */
  PetscInt      iparm[64];       /* Integer parameters                                   */
  double        dparm[64];       /* Floating point parameters                            */
  MPI_Comm      pastix_comm;     /* PaStiX MPI communicator (dup of A's communicator)    */
  PetscMPIInt   commRank;        /* MPI rank                                             */
  PetscMPIInt   commSize;        /* MPI communicator size                                */
  PetscBool     CleanUpPastix;   /* Boolean indicating if we call PaStiX clean step      */
  VecScatter    scat_rhs;        /* Scatter of the distributed rhs to the sequential rhs */
  VecScatter    scat_sol;        /* Scatter of the sequential solution back to mpi x     */
  Vec           b_seq;           /* Sequential (centralized) right-hand side             */
  PetscBool     isAIJ;           /* Input matrix format: AIJ vs SBAIJ                    */
  PetscErrorCode (*Destroy)(Mat); /* Original destroy routine, chained after cleanup     */
} Mat_Pastix;
65 
66 extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "MatConvertToCSC"
70 /*
71    convert Petsc seqaij matrix to CSC: colptr[n], row[nz], val[nz]
72 
73   input:
74     A       - matrix in seqaij or mpisbaij (bs=1) format
75     valOnly - FALSE: spaces are allocated and values are set for the CSC
76               TRUE:  Only fill values
77   output:
78     n       - Size of the matrix
79     colptr  - Index of first element of each column in row and val
80     row     - Row of each element of the matrix
81     values  - Value of each element of the matrix
82  */
83 PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
84 {
85   Mat_SeqAIJ     *aa      = (Mat_SeqAIJ*)A->data;
86   PetscInt       *rowptr  = aa->i;
87   PetscInt       *col     = aa->j;
88   PetscScalar    *rvalues = aa->a;
89   PetscInt       m        = A->rmap->N;
90   PetscInt       nnz;
91   PetscInt       i,j, k;
92   PetscInt       base = 1;
93   PetscInt       idx;
94   PetscErrorCode ierr;
95   PetscInt       colidx;
96   PetscInt       *colcount;
97   PetscBool      isSBAIJ;
98   PetscBool      isSeqSBAIJ;
99   PetscBool      isMpiSBAIJ;
100   PetscBool      isSym;
101   PetscBool      flg;
102   PetscInt       icntl;
103   PetscInt       verb;
104   PetscInt       check;
105 
106   PetscFunctionBegin;
107   ierr = MatIsSymmetric(A,0.0,&isSym);CHKERRQ(ierr);
108   ierr = PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);CHKERRQ(ierr);
109   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
110   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);CHKERRQ(ierr);
111 
112   *n = A->cmap->N;
113 
114   /* PaStiX only needs triangular matrix if matrix is symmetric
115    */
116   if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n)/2 + *n;
117   else nnz = aa->nz;
118 
119   if (!valOnly) {
120     ierr = PetscMalloc(((*n)+1) *sizeof(PetscInt)   ,colptr);CHKERRQ(ierr);
121     ierr = PetscMalloc(nnz      *sizeof(PetscInt)   ,row);CHKERRQ(ierr);
122     ierr = PetscMalloc1(nnz      ,values);CHKERRQ(ierr);
123 
124     if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
125       ierr = PetscMemcpy (*colptr, rowptr, ((*n)+1)*sizeof(PetscInt));CHKERRQ(ierr);
126       for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
127       ierr = PetscMemcpy (*row, col, (nnz)*sizeof(PetscInt));CHKERRQ(ierr);
128       for (i = 0; i < nnz; i++) (*row)[i] += base;
129       ierr = PetscMemcpy (*values, rvalues, (nnz)*sizeof(PetscScalar));CHKERRQ(ierr);
130     } else {
131       ierr = PetscMalloc1((*n),&colcount);CHKERRQ(ierr);
132 
133       for (i = 0; i < m; i++) colcount[i] = 0;
134       /* Fill-in colptr */
135       for (i = 0; i < m; i++) {
136         for (j = rowptr[i]; j < rowptr[i+1]; j++) {
137           if (!isSym || col[j] <= i)  colcount[col[j]]++;
138         }
139       }
140 
141       (*colptr)[0] = base;
142       for (j = 0; j < *n; j++) {
143         (*colptr)[j+1] = (*colptr)[j] + colcount[j];
144         /* in next loop we fill starting from (*colptr)[colidx] - base */
145         colcount[j] = -base;
146       }
147 
148       /* Fill-in rows and values */
149       for (i = 0; i < m; i++) {
150         for (j = rowptr[i]; j < rowptr[i+1]; j++) {
151           if (!isSym || col[j] <= i) {
152             colidx         = col[j];
153             idx            = (*colptr)[colidx] + colcount[colidx];
154             (*row)[idx]    = i + base;
155             (*values)[idx] = rvalues[j];
156             colcount[colidx]++;
157           }
158         }
159       }
160       ierr = PetscFree(colcount);CHKERRQ(ierr);
161     }
162   } else {
163     /* Fill-in only values */
164     for (i = 0; i < m; i++) {
165       for (j = rowptr[i]; j < rowptr[i+1]; j++) {
166         colidx = col[j];
167         if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) ||!isSym || col[j] <= i) {
168           /* look for the value to fill */
169           for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
170             if (((*row)[k]-base) == i) {
171               (*values)[k] = rvalues[j];
172               break;
173             }
174           }
175           /* data structure of sparse matrix has changed */
176           if (k == (*colptr)[colidx + 1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
177         }
178       }
179     }
180   }
181 
182   icntl =-1;
183   check = 0;
184   ierr  = PetscOptionsGetInt(((PetscObject) A)->prefix, "-mat_pastix_check", &icntl, &flg);CHKERRQ(ierr);
185   if ((flg && icntl >= 0) || PetscLogPrintInfo) check =  icntl;
186 
187   if (check == 1) {
188     PetscScalar *tmpvalues;
189     PetscInt    *tmprows,*tmpcolptr;
190     tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar));    if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
191     tmprows   = (PetscInt*)   malloc(nnz*sizeof(PetscInt));       if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
192     tmpcolptr = (PetscInt*)   malloc((*n+1)*sizeof(PetscInt));    if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
193 
194     ierr = PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));CHKERRQ(ierr);
195     ierr = PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));CHKERRQ(ierr);
196     ierr = PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));CHKERRQ(ierr);
197     ierr = PetscFree(*row);CHKERRQ(ierr);
198     ierr = PetscFree(*values);CHKERRQ(ierr);
199 
200     icntl=-1;
201     verb = API_VERBOSE_NOT;
202     /* "iparm[IPARM_VERBOSE] : level of printing (0 to 2)" */
203     ierr = PetscOptionsGetInt(((PetscObject) A)->prefix, "-mat_pastix_verbose", &icntl, &flg);CHKERRQ(ierr);
204     if ((flg && icntl >= 0) || PetscLogPrintInfo) verb =  icntl;
205     PASTIX_CHECKMATRIX(MPI_COMM_WORLD,verb,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,(PastixScalar**)&tmpvalues,NULL,1);
206 
207     ierr = PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));CHKERRQ(ierr);
208     ierr = PetscMalloc1(((*colptr)[*n]-1),row);CHKERRQ(ierr);
209     ierr = PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));CHKERRQ(ierr);
210     ierr = PetscMalloc1(((*colptr)[*n]-1),values);CHKERRQ(ierr);
211     ierr = PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));CHKERRQ(ierr);
212     free(tmpvalues);
213     free(tmprows);
214     free(tmpcolptr);
215 
216   }
217   PetscFunctionReturn(0);
218 }
219 
220 
221 
222 #undef __FUNCT__
223 #define __FUNCT__ "MatDestroy_Pastix"
224 /*
225   Call clean step of PaStiX if lu->CleanUpPastix == true.
226   Free the CSC matrix.
227  */
228 PetscErrorCode MatDestroy_Pastix(Mat A)
229 {
230   Mat_Pastix     *lu=(Mat_Pastix*)A->spptr;
231   PetscErrorCode ierr;
232   PetscMPIInt    size=lu->commSize;
233 
234   PetscFunctionBegin;
235   if (lu && lu->CleanUpPastix) {
236     /* Terminate instance, deallocate memories */
237     if (size > 1) {
238       ierr = VecScatterDestroy(&lu->scat_rhs);CHKERRQ(ierr);
239       ierr = VecDestroy(&lu->b_seq);CHKERRQ(ierr);
240       ierr = VecScatterDestroy(&lu->scat_sol);CHKERRQ(ierr);
241     }
242 
243     lu->iparm[IPARM_START_TASK]=API_TASK_CLEAN;
244     lu->iparm[IPARM_END_TASK]  =API_TASK_CLEAN;
245 
246     PASTIX_CALL(&(lu->pastix_data),
247                 lu->pastix_comm,
248                 lu->n,
249                 lu->colptr,
250                 lu->row,
251                 (PastixScalar*)lu->val,
252                 lu->perm,
253                 lu->invp,
254                 (PastixScalar*)lu->rhs,
255                 lu->rhsnbr,
256                 lu->iparm,
257                 lu->dparm);
258 
259     ierr = PetscFree(lu->colptr);CHKERRQ(ierr);
260     ierr = PetscFree(lu->row);CHKERRQ(ierr);
261     ierr = PetscFree(lu->val);CHKERRQ(ierr);
262     ierr = PetscFree(lu->perm);CHKERRQ(ierr);
263     ierr = PetscFree(lu->invp);CHKERRQ(ierr);
264     ierr = MPI_Comm_free(&(lu->pastix_comm));CHKERRQ(ierr);
265   }
266   if (lu && lu->Destroy) {
267     ierr = (lu->Destroy)(A);CHKERRQ(ierr);
268   }
269   ierr = PetscFree(A->spptr);CHKERRQ(ierr);
270   PetscFunctionReturn(0);
271 }
272 
273 #undef __FUNCT__
274 #define __FUNCT__ "MatSolve_PaStiX"
275 /*
276   Gather right-hand-side.
277   Call for Solve step.
278   Scatter solution.
279  */
280 PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
281 {
282   Mat_Pastix     *lu=(Mat_Pastix*)A->spptr;
283   PetscScalar    *array;
284   Vec            x_seq;
285   PetscErrorCode ierr;
286 
287   PetscFunctionBegin;
288   lu->rhsnbr = 1;
289   x_seq      = lu->b_seq;
290   if (lu->commSize > 1) {
291     /* PaStiX only supports centralized rhs. Scatter b into a seqential rhs vector */
292     ierr = VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
293     ierr = VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
294     ierr = VecGetArray(x_seq,&array);CHKERRQ(ierr);
295   } else {  /* size == 1 */
296     ierr = VecCopy(b,x);CHKERRQ(ierr);
297     ierr = VecGetArray(x,&array);CHKERRQ(ierr);
298   }
299   lu->rhs = array;
300   if (lu->commSize == 1) {
301     ierr = VecRestoreArray(x,&array);CHKERRQ(ierr);
302   } else {
303     ierr = VecRestoreArray(x_seq,&array);CHKERRQ(ierr);
304   }
305 
306   /* solve phase */
307   /*-------------*/
308   lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
309   lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
310   lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;
311 
312   PASTIX_CALL(&(lu->pastix_data),
313               lu->pastix_comm,
314               lu->n,
315               lu->colptr,
316               lu->row,
317               (PastixScalar*)lu->val,
318               lu->perm,
319               lu->invp,
320               (PastixScalar*)lu->rhs,
321               lu->rhsnbr,
322               lu->iparm,
323               lu->dparm);
324 
325   if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);
326 
327   if (lu->commSize == 1) {
328     ierr = VecRestoreArray(x,&(lu->rhs));CHKERRQ(ierr);
329   } else {
330     ierr = VecRestoreArray(x_seq,&(lu->rhs));CHKERRQ(ierr);
331   }
332 
333   if (lu->commSize > 1) { /* convert PaStiX centralized solution to petsc mpi x */
334     ierr = VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
335     ierr = VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
336   }
337   PetscFunctionReturn(0);
338 }
339 
340 /*
341   Numeric factorisation using PaStiX solver.
342 
343  */
344 #undef __FUNCT__
345 #define __FUNCT__ "MatFactorNumeric_PaStiX"
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix     *lu =(Mat_Pastix*)(F)->spptr;
  Mat            *tseq;          /* sequential copy of A gathered on each process */
  PetscErrorCode ierr = 0;
  PetscInt       icntl;
  PetscInt       M=A->rmap->N;
  PetscBool      valOnly,flg, isSym;
  Mat            F_diag;
  IS             is_iden;
  Vec            b;
  IS             isrow;
  PetscBool      isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);CHKERRQ(ierr);
  /* First factorization (or new nonzero pattern): create the PaStiX
     instance, read options, and later run ordering + numeric factorization */
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PASTIX instance */
    ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->pastix_comm));CHKERRQ(ierr);
    ierr = MPI_Comm_rank(lu->pastix_comm, &lu->commRank);CHKERRQ(ierr);
    ierr = MPI_Comm_size(lu->pastix_comm, &lu->commSize);CHKERRQ(ierr);

    /* Set pastix options: ask PaStiX to fill iparm/dparm with its defaults
       during the INIT task */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call to set default pastix options */
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"PaStiX Options","Mat");CHKERRQ(ierr);

    icntl = -1;

    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;

    ierr = PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);CHKERRQ(ierr);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] =  icntl;
    }
    icntl=-1;
    ierr = PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : Number of thread by MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);CHKERRQ(ierr);
    if ((flg && icntl > 0)) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    /* Same pattern: AIJ input rebuilds the CSC arrays from scratch, SBAIJ
       input can refresh values in place (its CSR layout maps 1-1 to CSC) */
    if (isSeqAIJ || isMPIAIJ) {
      ierr    = PetscFree(lu->colptr);CHKERRQ(ierr);
      ierr    = PetscFree(lu->row);CHKERRQ(ierr);
      ierr    = PetscFree(lu->val);CHKERRQ(ierr);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert mpi A to seq mat A
     NOTE(review): this gathers the entire matrix on every process via
     MatGetSubMatrices; memory-heavy for large distributed problems */
  ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
  ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
  ierr = ISDestroy(&isrow);CHKERRQ(ierr);

  ierr = MatConvertToCSC(*tseq,valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val);CHKERRQ(ierr);
  ierr = MatIsSymmetric(*tseq,0.0,&isSym);CHKERRQ(ierr);
  ierr = MatDestroyMatrices(1,&tseq);CHKERRQ(ierr);

  /* permutation arrays are filled by PaStiX during the ordering task */
  if (!lu->perm) {
    ierr = PetscMalloc1((lu->n),&(lu->perm));CHKERRQ(ierr);
    ierr = PetscMalloc1((lu->n),&(lu->invp));CHKERRQ(ierr);
  }

  if (isSym) {
    /* On symmetric matrix, LLT */
    /* NOTE(review): the code actually requests LDLT here despite the
       comment; presumably intentional for indefinite matrices — confirm */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On unsymmetric matrix, LU */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      ierr = VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);CHKERRQ(ierr);
      ierr = MatGetVecs(A,NULL,&b);CHKERRQ(ierr);
      ierr = VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);CHKERRQ(ierr);
      ierr = VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);CHKERRQ(ierr);
      ierr = ISDestroy(&is_iden);CHKERRQ(ierr);
      ierr = VecDestroy(&b);CHKERRQ(ierr);
    }
    /* run ordering, symbolic analysis and numeric factorization in one sweep */
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    /* same pattern: only the numeric factorization needs to be redone */
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  /* mark the (unused) diagonal block of the parallel factor as assembled so
     PETSc bookkeeping on F does not complain */
  if (lu->commSize > 1) {
    if ((F)->factortype == MAT_FACTOR_LU) {
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  PetscFunctionReturn(0);
}
505 
506 /* Note the Petsc r and c permutations are ignored */
507 #undef __FUNCT__
508 #define __FUNCT__ "MatLUFactorSymbolic_AIJPASTIX"
509 PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
510 {
511   Mat_Pastix *lu = (Mat_Pastix*)F->spptr;
512 
513   PetscFunctionBegin;
514   lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
515   lu->iparm[IPARM_SYM]           = API_SYM_YES;
516   lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
517   F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
518   PetscFunctionReturn(0);
519 }
520 
521 
522 /* Note the Petsc r permutation is ignored */
523 #undef __FUNCT__
524 #define __FUNCT__ "MatCholeskyFactorSymbolic_SBAIJPASTIX"
525 PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
526 {
527   Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;
528 
529   PetscFunctionBegin;
530   lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
531   lu->iparm[IPARM_SYM]            = API_SYM_NO;
532   lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
533   (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
534   PetscFunctionReturn(0);
535 }
536 
537 #undef __FUNCT__
538 #define __FUNCT__ "MatView_PaStiX"
539 PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
540 {
541   PetscErrorCode    ierr;
542   PetscBool         iascii;
543   PetscViewerFormat format;
544 
545   PetscFunctionBegin;
546   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
547   if (iascii) {
548     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
549     if (format == PETSC_VIEWER_ASCII_INFO) {
550       Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
551 
552       ierr = PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");CHKERRQ(ierr);
553       ierr = PetscViewerASCIIPrintf(viewer,"  Matrix type :                      %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));CHKERRQ(ierr);
554       ierr = PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2):         %d \n",lu->iparm[IPARM_VERBOSE]);CHKERRQ(ierr);
555       ierr = PetscViewerASCIIPrintf(viewer,"  Number of refinements iterations : %d \n",lu->iparm[IPARM_NBITER]);CHKERRQ(ierr);
556       ierr = PetscPrintf(PETSC_COMM_SELF,"  Error :                        %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);CHKERRQ(ierr);
557     }
558   }
559   PetscFunctionReturn(0);
560 }
561 
562 
563 /*MC
     MATSOLVERPASTIX  - A solver package providing direct solvers (LU for AIJ, Cholesky for SBAIJ) for distributed
  and sequential matrices via the external package PaStiX.
566 
567   Use ./configure --download-pastix to have PETSc installed with PaStiX
568 
569   Options Database Keys:
570 + -mat_pastix_verbose   <0,1,2>   - print level
571 - -mat_pastix_threadnbr <integer> - Set the thread number by MPI task.
572 
573   Level: beginner
574 
575 .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
576 
577 M*/
578 
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "MatGetInfo_PaStiX"
582 PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
583 {
584   Mat_Pastix *lu =(Mat_Pastix*)A->spptr;
585 
586   PetscFunctionBegin;
587   info->block_size        = 1.0;
588   info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
589   info->nz_used           = lu->iparm[IPARM_NNZEROS];
590   info->nz_unneeded       = 0.0;
591   info->assemblies        = 0.0;
592   info->mallocs           = 0.0;
593   info->memory            = 0.0;
594   info->fill_ratio_given  = 0;
595   info->fill_ratio_needed = 0;
596   info->factor_mallocs    = 0;
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "MatFactorGetSolverPackage_pastix"
/* Report the solver package backing this factor matrix (queried through
   MatFactorGetSolverPackage()). */
PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERPASTIX;
  PetscFunctionReturn(0);
}
608 
609 /*
610     The seq and mpi versions of this function are the same
611 */
612 #undef __FUNCT__
613 #define __FUNCT__ "MatGetFactor_seqaij_pastix"
614 PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
615 {
616   Mat            B;
617   PetscErrorCode ierr;
618   Mat_Pastix     *pastix;
619 
620   PetscFunctionBegin;
621   if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
622   /* Create the factorization matrix */
623   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
624   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
625   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
626   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
627 
628   B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
629   B->ops->view             = MatView_PaStiX;
630   B->ops->getinfo          = MatGetInfo_PaStiX;
631 
632   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);CHKERRQ(ierr);
633 
634   B->factortype = MAT_FACTOR_LU;
635 
636   ierr = PetscNewLog(B,&pastix);CHKERRQ(ierr);
637 
638   pastix->CleanUpPastix = PETSC_FALSE;
639   pastix->isAIJ         = PETSC_TRUE;
640   pastix->scat_rhs      = NULL;
641   pastix->scat_sol      = NULL;
642   pastix->Destroy       = B->ops->destroy;
643   B->ops->destroy       = MatDestroy_Pastix;
644   B->spptr              = (void*)pastix;
645 
646   *F = B;
647   PetscFunctionReturn(0);
648 }
649 
650 #undef __FUNCT__
651 #define __FUNCT__ "MatGetFactor_mpiaij_pastix"
652 PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
653 {
654   Mat            B;
655   PetscErrorCode ierr;
656   Mat_Pastix     *pastix;
657 
658   PetscFunctionBegin;
659   if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
660   /* Create the factorization matrix */
661   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
662   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
663   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
664   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
665   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
666 
667   B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
668   B->ops->view             = MatView_PaStiX;
669 
670   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);CHKERRQ(ierr);
671 
672   B->factortype = MAT_FACTOR_LU;
673 
674   ierr = PetscNewLog(B,&pastix);CHKERRQ(ierr);
675 
676   pastix->CleanUpPastix = PETSC_FALSE;
677   pastix->isAIJ         = PETSC_TRUE;
678   pastix->scat_rhs      = NULL;
679   pastix->scat_sol      = NULL;
680   pastix->Destroy       = B->ops->destroy;
681   B->ops->destroy       = MatDestroy_Pastix;
682   B->spptr              = (void*)pastix;
683 
684   *F = B;
685   PetscFunctionReturn(0);
686 }
687 
688 #undef __FUNCT__
689 #define __FUNCT__ "MatGetFactor_seqsbaij_pastix"
690 PETSC_EXTERN PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
691 {
692   Mat            B;
693   PetscErrorCode ierr;
694   Mat_Pastix     *pastix;
695 
696   PetscFunctionBegin;
697   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
698   /* Create the factorization matrix */
699   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
700   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
701   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
702   ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
703   ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
704 
705   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
706   B->ops->view                   = MatView_PaStiX;
707 
708   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);CHKERRQ(ierr);
709 
710   B->factortype = MAT_FACTOR_CHOLESKY;
711 
712   ierr = PetscNewLog(B,&pastix);CHKERRQ(ierr);
713 
714   pastix->CleanUpPastix = PETSC_FALSE;
715   pastix->isAIJ         = PETSC_TRUE;
716   pastix->scat_rhs      = NULL;
717   pastix->scat_sol      = NULL;
718   pastix->Destroy       = B->ops->destroy;
719   B->ops->destroy       = MatDestroy_Pastix;
720   B->spptr              = (void*)pastix;
721 
722   *F = B;
723   PetscFunctionReturn(0);
724 }
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "MatGetFactor_mpisbaij_pastix"
728 PETSC_EXTERN PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
729 {
730   Mat            B;
731   PetscErrorCode ierr;
732   Mat_Pastix     *pastix;
733 
734   PetscFunctionBegin;
735   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
736 
737   /* Create the factorization matrix */
738   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
739   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
740   ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
741   ierr = MatSeqSBAIJSetPreallocation(B,1,0,NULL);CHKERRQ(ierr);
742   ierr = MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);CHKERRQ(ierr);
743 
744   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
745   B->ops->view                   = MatView_PaStiX;
746 
747   ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);CHKERRQ(ierr);
748 
749   B->factortype = MAT_FACTOR_CHOLESKY;
750 
751   ierr = PetscNewLog(B,&pastix);CHKERRQ(ierr);
752 
753   pastix->CleanUpPastix = PETSC_FALSE;
754   pastix->isAIJ         = PETSC_TRUE;
755   pastix->scat_rhs      = NULL;
756   pastix->scat_sol      = NULL;
757   pastix->Destroy       = B->ops->destroy;
758   B->ops->destroy       = MatDestroy_Pastix;
759   B->spptr              = (void*)pastix;
760 
761   *F = B;
762   PetscFunctionReturn(0);
763 }
764 
765