xref: /petsc/src/mat/impls/hypre/mhypre.c (revision bb4689dd300edcc1bdc6e337af69bbd99e9531fb)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 /*MC
7    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
8           based on the hypre IJ interface.
9 
10    Level: intermediate
11 
12 .seealso: MatCreate()
13 M*/
14 
15 #include <petscsys.h>
16 #include <petsc/private/matimpl.h>
17 #include <../src/mat/impls/hypre/mhypre.h>
18 #include <../src/mat/impls/aij/mpi/mpiaij.h>
19 #include <../src/vec/vec/impls/hypre/vhyp.h>
20 #include <HYPRE.h>
21 #include <HYPRE_utilities.h>
22 #include <_hypre_parcsr_ls.h>
23 
24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
29 static PetscErrorCode hypre_array_destroy(void*);
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "MatHYPRE_IJMatrixPreallocate"
33 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
34 {
35   PetscErrorCode ierr;
36   PetscInt       i,n_d,n_o;
37   const PetscInt *ia_d,*ia_o;
38   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
39   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
40 
41   PetscFunctionBegin;
42   if (A_d) { /* determine number of nonzero entries in local diagonal part */
43     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
44     if (done_d) {
45       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
46       for (i=0; i<n_d; i++) {
47         nnz_d[i] = ia_d[i+1] - ia_d[i];
48       }
49     }
50     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
51   }
52   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
53     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
54     if (done_o) {
55       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
56       for (i=0; i<n_o; i++) {
57         nnz_o[i] = ia_o[i+1] - ia_o[i];
58       }
59     }
60     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
61   }
62   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
63     if (!done_o) { /* only diagonal part */
64       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
65       for (i=0; i<n_d; i++) {
66         nnz_o[i] = 0;
67       }
68     }
69     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
70     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
71     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
72   }
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNCT__
77 #define __FUNCT__ "MatHYPRE_CreateFromMat"
78 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
79 {
80   PetscErrorCode ierr;
81   PetscInt       rstart,rend,cstart,cend;
82 
83   PetscFunctionBegin;
84   ierr   = MatSetUp(A);CHKERRQ(ierr);
85   rstart = A->rmap->rstart;
86   rend   = A->rmap->rend;
87   cstart = A->cmap->rstart;
88   cend   = A->cmap->rend;
89   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
90   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
91   {
92     PetscBool      same;
93     Mat            A_d,A_o;
94     const PetscInt *colmap;
95     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
96     if (same) {
97       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
98       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
99       PetscFunctionReturn(0);
100     }
101     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
102     if (same) {
103       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
104       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
105       PetscFunctionReturn(0);
106     }
107     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
108     if (same) {
109       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
110       PetscFunctionReturn(0);
111     }
112     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
113     if (same) {
114       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
115       PetscFunctionReturn(0);
116     }
117   }
118   PetscFunctionReturn(0);
119 }
120 
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixCopy"
/*
   Copies the numerical values of the PETSc matrix A into the (already
   created/preallocated) hypre IJ matrix ij.

   Fast paths writing directly into hypre's CSR storage are taken for MPIAIJ
   (square matrices only; the fast path shifts diagonal column indices by the
   row start, which assumes the usual square layout) and for SeqAIJ.  All
   other types go row by row through MatGetRow/HYPRE_IJMatrixSetValues.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  if (flg && nr == nc) { /* fast path is only valid for square MPIAIJ */
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* generic path: insert one locally-owned row at a time using global indices */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    /* NOTE(review): the PetscInt* -> HYPRE_Int* casts assume the two integer
       types have the same width in this build configuration — confirm */
    PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ"
156 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
157 {
158   PetscErrorCode        ierr;
159   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
160   HYPRE_Int             type;
161   hypre_ParCSRMatrix    *par_matrix;
162   hypre_AuxParCSRMatrix *aux_matrix;
163   hypre_CSRMatrix       *hdiag;
164 
165   PetscFunctionBegin;
166   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
167   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
168   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
169   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
170   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
171   /*
172        this is the Hack part where we monkey directly with the hypre datastructures
173   */
174   ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
175   ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
176   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
177 
178   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
179   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
180   PetscFunctionReturn(0);
181 }
182 
183 #undef __FUNCT__
184 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ"
185 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
186 {
187   PetscErrorCode        ierr;
188   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
189   Mat_SeqAIJ            *pdiag,*poffd;
190   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
191   HYPRE_Int             type;
192   hypre_ParCSRMatrix    *par_matrix;
193   hypre_AuxParCSRMatrix *aux_matrix;
194   hypre_CSRMatrix       *hdiag,*hoffd;
195 
196   PetscFunctionBegin;
197   pdiag = (Mat_SeqAIJ*) pA->A->data;
198   poffd = (Mat_SeqAIJ*) pA->B->data;
199   /* cstart is only valid for square MPIAIJ layed out in the usual way */
200   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
201 
202   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
203   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
204   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
205   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
206   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
207   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
208 
209   /*
210        this is the Hack part where we monkey directly with the hypre datastructures
211   */
212   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
213   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
214   jj  = (PetscInt*)hdiag->j;
215   pjj = (PetscInt*)pdiag->j;
216   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
217   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
218   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
219   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
220      If we hacked a hypre a bit more we might be able to avoid this step */
221   jj  = (PetscInt*) hoffd->j;
222   pjj = (PetscInt*) poffd->j;
223   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
224   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
225 
226   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
227   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
228   PetscFunctionReturn(0);
229 }
230 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_IS"
/*
   Converts a MATHYPRE matrix to MATIS.  The local (per-process) matrix of the
   MATIS object is built by merging hypre's diagonal and off-diagonal CSR
   blocks row by row into a single SeqAIJ CSR, with off-diagonal columns
   renumbered to follow the diagonal ones (local index + dc).  Column l2g
   mapping is [stc..stc+dc) for the diagonal part followed by hypre's
   col_map_offd for the off-diagonal part.

   reuse == MAT_REUSE_MATRIX writes into the existing local matrix of *B;
   reuse == MAT_INPLACE_MATRIX additionally replaces A's header with *B.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  PetscInt               *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, starts and raw CSR arrays out of the hypre ParCSR object */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total of diag + offd */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns 0..dc-1 map to the owned block, dc..dc+oc-1 to ghosts */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse: write directly into the CSR arrays of the existing local matrix */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk diag and offd row pointers in lockstep, append
     diag entries (local column index) then offd entries (shifted by dc), and
     sort each merged row by column */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad the row pointer for any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of iptr/jptr/data to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
338 
339 #undef __FUNCT__
340 #define __FUNCT__ "MatConvert_AIJ_HYPRE"
341 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
342 {
343   Mat_HYPRE      *hB;
344   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
345   PetscErrorCode ierr;
346 
347   PetscFunctionBegin;
348   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
349   if (reuse == MAT_REUSE_MATRIX) {
350     /* always destroy the old matrix and create a new memory;
351        hope this does not churn the memory too much. The problem
352        is I do not know if it is possible to put the matrix back to
353        its initial state so that we can directly copy the values
354        the second time through. */
355     hB = (Mat_HYPRE*)((*B)->data);
356     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
357   } else {
358     ierr = MatCreate(comm,B);CHKERRQ(ierr);
359     ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
360     ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
361     ierr = MatSetUp(*B);CHKERRQ(ierr);
362     hB   = (Mat_HYPRE*)((*B)->data);
363   }
364   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
365   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
366   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
367   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
368   PetscFunctionReturn(0);
369 }
370 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_AIJ"
/*
   Converts a MATHYPRE matrix to (Seq|MPI)AIJ.

   MAT_INITIAL_MATRIX: allocates new CSR arrays and hands ownership to the new
   AIJ matrix (free_a/free_ij hack).
   MAT_REUSE_MATRIX:   writes into the CSR arrays of the existing *B.
   MAT_INPLACE_MATRIX: aliases hypre's own CSR arrays, detaches them from the
   hypre object (pointers NULLed), replaces A's header, and registers
   containers so the arrays are eventually released with hypre_TFree via
   hypre_array_destroy.

   In all cases the column indices within each row are sorted (hypre does not
   guarantee AIJ's sorted-row invariant), and off-diagonal indices are mapped
   to global numbering through col_map_offd.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain destination arrays for the diagonal block, depending on reuse mode */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    /* alias hypre's own storage; no copy will effectively happen below */
    dii = hypre_CSRMatrixI(hdiag);
    djj = hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort each row of the diagonal block by column index */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    PetscInt *offdj,*coffd;

    /* same three-way logic for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = hypre_CSRMatrixI(hoffd);
      ojj = hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    /* map compact off-diagonal column indices to global numbering */
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    /* sort each row of the off-diagonal block by column index */
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
                                            djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the freshly allocated arrays */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
                                            djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the aliased arrays from hypre so destroying A does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the freshly allocated arrays */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      /* detach the aliased arrays from hypre so destroying A does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach each hypre-owned array to *B via a container whose destroy
       callback calls hypre's deallocator */
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
543 
544 #undef __FUNCT__
545 #define __FUNCT__ "MatPtAP_AIJ_HYPRE"
546 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
547 {
548   Mat_HYPRE          *hP = (Mat_HYPRE*)P->data;
549   hypre_ParCSRMatrix *tA,*Pparcsr,*ptapparcsr;
550   hypre_CSRMatrix    *hdiag,*hoffd;
551   Mat_SeqAIJ         *diag,*offd;
552   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
553   HYPRE_Int          type,P_owns_col_starts;
554   PetscBool          ismpiaij,isseqaij;
555   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
556   char               mtype[256];
557   PetscErrorCode     ierr;
558 
559   PetscFunctionBegin;
560   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
561   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
562   if (scall == MAT_REUSE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_REUSE_MATRIX");
563   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
564   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
565   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
566 
567   /* It looks like we don't need to have the diagonal entries
568      ordered first in the rows of the diagonal part
569      for boomerAMGBuildCoarseOperator to work */
570   if (ismpiaij) {
571     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
572 
573     diag   = (Mat_SeqAIJ*)a->A->data;
574     offd   = (Mat_SeqAIJ*)a->B->data;
575     garray = a->garray;
576     noffd  = a->B->cmap->N;
577     dnnz   = diag->nz;
578     onnz   = offd->nz;
579   } else {
580     diag    = (Mat_SeqAIJ*)A->data;
581     offd    = NULL;
582     garray  = NULL;
583     noffd   = 0;
584     dnnz    = diag->nz;
585     onnz    = 0;
586   }
587 
588   /* create a temporary ParCSR */
589   if (HYPRE_AssumedPartitionCheck()) {
590    PetscMPIInt myid;
591 
592    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
593    row_starts = A->rmap->range + myid;
594    col_starts = A->cmap->range + myid;
595   } else {
596    row_starts = A->rmap->range;
597    col_starts = A->cmap->range;
598   }
599   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,row_starts,col_starts,noffd,dnnz,onnz);
600   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
601   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
602 
603   /* set diagonal part */
604   hdiag = hypre_ParCSRMatrixDiag(tA);
605   hypre_CSRMatrixI(hdiag)           = diag->i;
606   hypre_CSRMatrixJ(hdiag)           = diag->j;
607   hypre_CSRMatrixData(hdiag)        = diag->a;
608   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
609   hypre_CSRMatrixSetRownnz(hdiag);
610   hypre_CSRMatrixSetDataOwner(hdiag,0);
611 
612   /* set offdiagonal part */
613   hoffd = hypre_ParCSRMatrixOffd(tA);
614   if (offd) {
615     hypre_CSRMatrixI(hoffd)           = offd->i;
616     hypre_CSRMatrixJ(hoffd)           = offd->j;
617     hypre_CSRMatrixData(hoffd)        = offd->a;
618     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
619     hypre_CSRMatrixSetRownnz(hoffd);
620     hypre_CSRMatrixSetDataOwner(hoffd,0);
621     hypre_ParCSRMatrixSetNumNonzeros(tA);
622     hypre_ParCSRMatrixColMapOffd(tA) = garray;
623   }
624 
625   /* call RAP from BoomerAMG */
626   /* Warning: hypre_BoomerAMGBuildCoarseOperator steals the col_starts
627      from Pparcsr (even if it does not own them)! */
628   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
629   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(Pparcsr);
630   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
631   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(Pparcsr,tA,Pparcsr,&ptapparcsr));
632   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
633   hypre_ParCSRMatrixSetRowStartsOwner(ptapparcsr,0);
634   hypre_ParCSRMatrixSetColStartsOwner(ptapparcsr,0);
635   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(Pparcsr, 1);
636 
637   /* set pointers to NULL before destroying tA */
638   hypre_CSRMatrixI(hdiag)          = NULL;
639   hypre_CSRMatrixJ(hdiag)          = NULL;
640   hypre_CSRMatrixData(hdiag)       = NULL;
641   hypre_CSRMatrixI(hoffd)          = NULL;
642   hypre_CSRMatrixJ(hoffd)          = NULL;
643   hypre_CSRMatrixData(hoffd)       = NULL;
644   hypre_ParCSRMatrixColMapOffd(tA) = NULL;
645   hypre_ParCSRMatrixDestroy(tA);
646 
647   /* create C depending on mtype */
648   sprintf(mtype,MATAIJ);
649   ierr = PetscOptionsGetString(((PetscObject)A)->options,((PetscObject)A)->prefix,"-matptap_hypre_outtype",mtype,256,NULL);CHKERRQ(ierr);
650   ierr = MatCreateFromParCSR(ptapparcsr,mtype,PETSC_OWN_POINTER,C);CHKERRQ(ierr);
651   PetscFunctionReturn(0);
652 }
653 
#undef __FUNCT__
#define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
/*
   Computes C = P^T * A * P with both A and P of type MATHYPRE by calling
   BoomerAMG's RAP kernel directly on the underlying ParCSR objects.
   Only MAT_INITIAL_MATRIX is supported; the result is a new MATHYPRE.
*/
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data, *hP = (Mat_HYPRE*)P->data;
  HYPRE_Int          type,P_owns_col_starts;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported MAT_REUSE_MATRIX");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* call RAP from BoomerAMG */
  /* Warning: hypre_BoomerAMGBuildCoarseOperator steals the col_starts
     from Pparcsr (even if it does not own them)! */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(Pparcsr);
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr));
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  /* the product owns neither set of range arrays; restore P's ownership flag */
  hypre_ParCSRMatrixSetRowStartsOwner(ptapparcsr,0);
  hypre_ParCSRMatrixSetColStartsOwner(ptapparcsr,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(Pparcsr, 1);

  /* create MatHYPRE */
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
687 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
/* y = A^T x for a MATHYPRE matrix; thin wrapper over the shared mult kernel */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
698 
#undef __FUNCT__
#define __FUNCT__ "MatMult_HYPRE"
/* y = A x for a MATHYPRE matrix; thin wrapper over the shared mult kernel */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
709 
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_MultKernel_Private"
/*
   Shared kernel for MatMult/MatMultTranspose on MATHYPRE.  No vector data is
   copied: the data pointers of the cached hypre work vectors (hA->x, hA->b)
   are temporarily swapped with the arrays of the PETSc vectors x and y, the
   hypre matvec is invoked, and the original pointers are restored.
   trans selects y = A^T x (PETSC_TRUE) versus y = A x.
*/
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* for the transpose, the roles of hx/hy are exchanged: input goes into hy */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
743 
744 #undef __FUNCT__
745 #define __FUNCT__ "MatDestroy_HYPRE"
746 static PetscErrorCode MatDestroy_HYPRE(Mat A)
747 {
748   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
749   PetscErrorCode ierr;
750 
751   PetscFunctionBegin;
752   if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
753   if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
754   if (hA->ij) {
755     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
756     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
757   }
758   if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
759   ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
760   ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
761   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
762   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
763   ierr = PetscFree(A->data);CHKERRQ(ierr);
764   PetscFunctionReturn(0);
765 }
766 
767 #undef __FUNCT__
768 #define __FUNCT__ "MatSetUp_HYPRE"
769 static PetscErrorCode MatSetUp_HYPRE(Mat A)
770 {
771   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
772   Vec                x,b;
773   PetscErrorCode     ierr;
774 
775   PetscFunctionBegin;
776   if (hA->x) PetscFunctionReturn(0);
777   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
778   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
779   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
780   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
781   ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
782   ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
783   ierr = VecDestroy(&x);CHKERRQ(ierr);
784   ierr = VecDestroy(&b);CHKERRQ(ierr);
785   PetscFunctionReturn(0);
786 }
787 
788 #undef __FUNCT__
789 #define __FUNCT__ "MatAssemblyEnd_HYPRE"
790 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
791 {
792   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
793   PetscErrorCode     ierr;
794 
795   PetscFunctionBegin;
796   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
797   PetscFunctionReturn(0);
798 }
799 
800 /*
801    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
802 
803    Collective
804 
805    Input Parameters:
806 +  vparcsr  - the pointer to the hypre_ParCSRMatrix
807 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
808 -  copymode - PETSc copying options
809 
810    Output Parameter:
811 .  A  - the matrix
812 
813    Level: intermediate
814 
815    Notes: Not all the combinations between copymode and mtype are currently supported.
816 
.seealso: MATHYPRE, PetscCopyMode
818 */
819 #undef __FUNCT__
820 #define __FUNCT__ "MatCreateFromParCSR"
821 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(void *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
822 {
823   Mat                   T;
824   Mat_HYPRE             *hA;
825   hypre_ParCSRMatrix    *parcsr;
826   MPI_Comm              comm;
827   PetscInt              rstart,rend,cstart,cend,M,N;
828   PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
829   PetscErrorCode        ierr;
830 
831   PetscFunctionBegin;
832   parcsr = (hypre_ParCSRMatrix *)vparcsr;
833   comm   = hypre_ParCSRMatrixComm(parcsr);
834   ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
835   ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
836   ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
837   ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
838   ierr   = PetscStrcmp(mtype,MATHYPRE,&isis);CHKERRQ(ierr);
839   isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
840   if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
841   if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");
842   if (isis  && copymode == PETSC_OWN_POINTER) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_OWN_POINTER");
843 
844   /* access ParCSRMatrix */
845   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
846   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
847   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
848   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
849   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
850   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);
851 
852   /* create PETSc matrix with MatHYPRE */
853   ierr = MatCreate(comm,&T);CHKERRQ(ierr);
854   ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
855   ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
856   ierr = MatSetUp(T);CHKERRQ(ierr);
857   hA   = (Mat_HYPRE*)(T->data);
858 
859   /* create HYPRE_IJMatrix */
860   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
861 
862   /* set ParCSR object */
863   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
864   hypre_IJMatrixObject(hA->ij) = parcsr;
865 
866   /* set assembled flag */
867   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
868   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
869   if (ishyp) {
870     /* prevent from freeing the pointer */
871     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
872     *A = T;
873     ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
874     ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
875   } else if (isaij) {
876     if (copymode != PETSC_OWN_POINTER) {
877       /* prevent from freeing the pointer */
878       hA->inner_free = PETSC_FALSE;
879       ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
880       ierr = MatDestroy(&T);CHKERRQ(ierr);
881     } else { /* AIJ return type with PETSC_OWN_POINTER */
882       ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
883       *A   = T;
884     }
885   } else if (isis) {
886     ierr = MatConvert_HYPRE_AIJ(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
887     ierr = MatDestroy(&T);CHKERRQ(ierr);
888   }
889   PetscFunctionReturn(0);
890 }
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "MatCreate_HYPRE"
894 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
895 {
896   Mat_HYPRE      *hB;
897   PetscErrorCode ierr;
898 
899   PetscFunctionBegin;
900   ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
901   hB->inner_free = PETSC_TRUE;
902 
903   B->data       = (void*)hB;
904   B->rmap->bs   = 1;
905   B->assembled  = PETSC_FALSE;
906 
907   B->ops->mult          = MatMult_HYPRE;
908   B->ops->multtranspose = MatMultTranspose_HYPRE;
909   B->ops->setup         = MatSetUp_HYPRE;
910   B->ops->destroy       = MatDestroy_HYPRE;
911   B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
912   B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
913 
914   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
915   ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
916   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
917   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
918   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
919   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
920   PetscFunctionReturn(0);
921 }
922 
923 #undef __FUNCT__
924 #define __FUNCT__ "hypre_array_destroy"
925 static PetscErrorCode hypre_array_destroy(void *ptr)
926 {
927    PetscFunctionBegin;
928    hypre_TFree(ptr);
929    PetscFunctionReturn(0);
930 }
931 
#if 0
/*
    Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

    This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
    which will corrupt the PETSc data structure if we did this. Need a work around to this problem.

    NOTE(review): kept disabled under #if 0 on purpose; do not enable without
    resolving the diagonal-entry ordering issue described above.
*/
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixLink"
/* (disabled) Would link a PETSc MPIAIJ matrix's arrays directly into a HYPRE_IJMatrix */
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre ranges are inclusive, hence rend-1/cend-1 */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  /* tell hypre not to build its auxiliary assembly structures */
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
979