xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 3dad0653c8b7d79802f38d512f3aec944c073f92)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 /*MC
7    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
8           based on the hypre IJ interface.
9 
10    Level: intermediate
11 
12 .seealso: MatCreate()
13 M*/
14 
15 #include <petscmathypre.h>
16 #include <petsc/private/matimpl.h>
17 #include <../src/mat/impls/hypre/mhypre.h>
18 #include <../src/mat/impls/aij/mpi/mpiaij.h>
19 #include <../src/vec/vec/impls/hypre/vhyp.h>
20 #include <HYPRE.h>
21 #include <HYPRE_utilities.h>
22 #include <_hypre_parcsr_ls.h>
23 
24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
29 static PetscErrorCode hypre_array_destroy(void*);
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "MatHYPRE_IJMatrixPreallocate"
33 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
34 {
35   PetscErrorCode ierr;
36   PetscInt       i,n_d,n_o;
37   const PetscInt *ia_d,*ia_o;
38   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
39   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
40 
41   PetscFunctionBegin;
42   if (A_d) { /* determine number of nonzero entries in local diagonal part */
43     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
44     if (done_d) {
45       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
46       for (i=0; i<n_d; i++) {
47         nnz_d[i] = ia_d[i+1] - ia_d[i];
48       }
49     }
50     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
51   }
52   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
53     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
54     if (done_o) {
55       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
56       for (i=0; i<n_o; i++) {
57         nnz_o[i] = ia_o[i+1] - ia_o[i];
58       }
59     }
60     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
61   }
62   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
63     if (!done_o) { /* only diagonal part */
64       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
65       for (i=0; i<n_d; i++) {
66         nnz_o[i] = 0;
67       }
68     }
69     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
70     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
71     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
72   }
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNCT__
77 #define __FUNCT__ "MatHYPRE_CreateFromMat"
78 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
79 {
80   PetscErrorCode ierr;
81   PetscInt       rstart,rend,cstart,cend;
82 
83   PetscFunctionBegin;
84   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
85   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
86   rstart = A->rmap->rstart;
87   rend   = A->rmap->rend;
88   cstart = A->cmap->rstart;
89   cend   = A->cmap->rend;
90   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
91   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
92   {
93     PetscBool      same;
94     Mat            A_d,A_o;
95     const PetscInt *colmap;
96     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
97     if (same) {
98       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
99       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
100       PetscFunctionReturn(0);
101     }
102     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
103     if (same) {
104       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
105       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
106       PetscFunctionReturn(0);
107     }
108     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
109     if (same) {
110       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
111       PetscFunctionReturn(0);
112     }
113     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
114     if (same) {
115       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
116       PetscFunctionReturn(0);
117     }
118   }
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "MatHYPRE_IJMatrixCopy"
/*
   Copies the values of the PETSc matrix A into the (already created and
   preallocated) hypre IJ matrix ij.

   Fast memcpy-based paths are used for MPIAIJ (square matrices only — the
   fast copy relies on the usual square column layout) and SeqAIJ; any other
   matrix type falls back to a generic row-by-row copy through MatGetRow().
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  if (flg && nr == nc) { /* fast path valid only for square MPIAIJ */
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* generic path: set one (global) row at a time through the IJ interface */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    if (ncols) {
      /* the casts assume PetscInt and HYPRE_Int have the same size — TODO confirm build configuration */
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
    }
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
156 
157 #undef __FUNCT__
158 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ"
159 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
160 {
161   PetscErrorCode        ierr;
162   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
163   HYPRE_Int             type;
164   hypre_ParCSRMatrix    *par_matrix;
165   hypre_AuxParCSRMatrix *aux_matrix;
166   hypre_CSRMatrix       *hdiag;
167 
168   PetscFunctionBegin;
169   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
170   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
171   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
172   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
173   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
174   /*
175        this is the Hack part where we monkey directly with the hypre datastructures
176   */
177   ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
178   ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
179   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
180 
181   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
182   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
183   PetscFunctionReturn(0);
184 }
185 
186 #undef __FUNCT__
187 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ"
188 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
189 {
190   PetscErrorCode        ierr;
191   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
192   Mat_SeqAIJ            *pdiag,*poffd;
193   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
194   HYPRE_Int             type;
195   hypre_ParCSRMatrix    *par_matrix;
196   hypre_AuxParCSRMatrix *aux_matrix;
197   hypre_CSRMatrix       *hdiag,*hoffd;
198 
199   PetscFunctionBegin;
200   pdiag = (Mat_SeqAIJ*) pA->A->data;
201   poffd = (Mat_SeqAIJ*) pA->B->data;
202   /* cstart is only valid for square MPIAIJ layed out in the usual way */
203   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
204 
205   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
206   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
207   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
208   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
209   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
210   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
211 
212   /*
213        this is the Hack part where we monkey directly with the hypre datastructures
214   */
215   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
216   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
217   jj  = (PetscInt*)hdiag->j;
218   pjj = (PetscInt*)pdiag->j;
219   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
220   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
221   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
222   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
223      If we hacked a hypre a bit more we might be able to avoid this step */
224   jj  = (PetscInt*) hoffd->j;
225   pjj = (PetscInt*) poffd->j;
226   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
227   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
228 
229   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
230   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
231   PetscFunctionReturn(0);
232 }
233 
234 #undef __FUNCT__
235 #define __FUNCT__ "MatConvert_HYPRE_IS"
/*
   Converts a MATHYPRE matrix A into a MATIS matrix *B, merging the hypre
   ParCSR diagonal and off-diagonal blocks into a single local SeqAIJ matrix
   whose columns are numbered [0,dc) for the diagonal part and [dc,dc+oc)
   for the off-diagonal part; the column l2g map translates these back to
   global indices.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes and raw CSR arrays out of the hypre ParCSR */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns [0,dc) are the owned (diag) columns, [dc,dc+oc) the ghost (offd) columns */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: overwrite the existing local matrix's CSR arrays in place.
       NOTE(review): this MatGetRowIJ/MatSeqAIJGetArray pair is never matched
       by MatRestoreRowIJ/MatSeqAIJRestoreArray — confirm intended */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave each row's diag and offd entries
     (offd columns shifted by dc into the local numbering), then sort the
     row's column indices together with its values */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad row pointers for any remaining empty trailing rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "MatConvert_AIJ_HYPRE"
344 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
345 {
346   Mat_HYPRE      *hB;
347   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
348   PetscErrorCode ierr;
349 
350   PetscFunctionBegin;
351   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
352   if (reuse == MAT_REUSE_MATRIX) {
353     /* always destroy the old matrix and create a new memory;
354        hope this does not churn the memory too much. The problem
355        is I do not know if it is possible to put the matrix back to
356        its initial state so that we can directly copy the values
357        the second time through. */
358     hB = (Mat_HYPRE*)((*B)->data);
359     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
360   } else {
361     ierr = MatCreate(comm,B);CHKERRQ(ierr);
362     ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
363     ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
364     hB   = (Mat_HYPRE*)((*B)->data);
365   }
366   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
367   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
368   (*B)->preallocated = PETSC_TRUE;
369   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
370   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
371   PetscFunctionReturn(0);
372 }
373 
374 #undef __FUNCT__
375 #define __FUNCT__ "MatConvert_HYPRE_AIJ"
/*
   Converts a MATHYPRE matrix A into an AIJ matrix *B (SeqAIJ on one process,
   MPIAIJ otherwise). MAT_INITIAL_MATRIX allocates fresh CSR arrays,
   MAT_REUSE_MATRIX overwrites the arrays of an existing AIJ matrix, and
   MAT_INPLACE_MATRIX steals the hypre arrays and replaces A's header.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain target CSR arrays for the diagonal part */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    /* NOTE(review): these MatGetRowIJ/MatSeqAIJGetArray calls are never
       matched by restore calls — confirm intended */
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX: write straight into the hypre arrays */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  /* copy the diag CSR (self-copy in the INPLACE case), then sort each row's
     column indices together with its values */
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    /* same scheme for the off-diagonal part; offd has the same m local rows */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate offd column indices to global numbering via the col map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      /* detach the arrays from hypre (they now belong to T) before replacing A's header */
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach the stolen hypre arrays to *B via containers so they get
       freed with hypre_array_destroy when *B is destroyed */
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "MatAIJGetParCSR_Private"
547 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
548 {
549   hypre_ParCSRMatrix *tA;
550   hypre_CSRMatrix    *hdiag,*hoffd;
551   Mat_SeqAIJ         *diag,*offd;
552   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
553   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
554   PetscBool          ismpiaij,isseqaij;
555   PetscErrorCode     ierr;
556 
557   PetscFunctionBegin;
558   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
559   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
560   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
561   if (ismpiaij) {
562     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
563 
564     diag   = (Mat_SeqAIJ*)a->A->data;
565     offd   = (Mat_SeqAIJ*)a->B->data;
566     garray = a->garray;
567     noffd  = a->B->cmap->N;
568     dnnz   = diag->nz;
569     onnz   = offd->nz;
570   } else {
571     diag   = (Mat_SeqAIJ*)A->data;
572     offd   = NULL;
573     garray = NULL;
574     noffd  = 0;
575     dnnz   = diag->nz;
576     onnz   = 0;
577   }
578 
579   /* create a temporary ParCSR */
580   if (HYPRE_AssumedPartitionCheck()) {
581     PetscMPIInt myid;
582 
583     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
584     row_starts = A->rmap->range + myid;
585     col_starts = A->cmap->range + myid;
586   } else {
587     row_starts = A->rmap->range;
588     col_starts = A->cmap->range;
589   }
590   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
591   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
592   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
593 
594   /* set diagonal part */
595   hdiag = hypre_ParCSRMatrixDiag(tA);
596   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
597   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
598   hypre_CSRMatrixData(hdiag)        = diag->a;
599   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
600   hypre_CSRMatrixSetRownnz(hdiag);
601   hypre_CSRMatrixSetDataOwner(hdiag,0);
602 
603   /* set offdiagonal part */
604   hoffd = hypre_ParCSRMatrixOffd(tA);
605   if (offd) {
606     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
607     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
608     hypre_CSRMatrixData(hoffd)        = offd->a;
609     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
610     hypre_CSRMatrixSetRownnz(hoffd);
611     hypre_CSRMatrixSetDataOwner(hoffd,0);
612     hypre_ParCSRMatrixSetNumNonzeros(tA);
613     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
614   }
615   *hA = tA;
616   PetscFunctionReturn(0);
617 }
618 
619 #undef __FUNCT__
620 #define __FUNCT__ "MatAIJRestoreParCSR_Private"
/*
   Destroys the temporary ParCSR wrapper created by MatAIJGetParCSR_Private().
   The CSR pointers inside *hA alias the PETSc matrix's arrays, so they must
   be set to NULL first; otherwise hypre_ParCSRMatrixDestroy would free
   memory owned by the PETSc matrix.
*/
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix    *hdiag,*hoffd;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
640 
641 /* calls RAP from BoomerAMG:
642    the resulting ParCSR will not own the column and row starts
643    It looks like we don't need to have the diagonal entries
644    ordered first in the rows of the diagonal part
645    for boomerAMGBuildCoarseOperator to work */
646 #undef __FUNCT__
647 #define __FUNCT__ "MatHYPRE_ParCSR_RAP"
648 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,
649                                           hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
650 {
651   PetscErrorCode ierr;
652   HYPRE_Int      P_owns_col_starts,R_owns_row_starts;
653 
654   PetscFunctionBegin;
655   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
656   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
657   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
658   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
659   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
660   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
661   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
662   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
663   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
664   PetscFunctionReturn(0);
665 }
666 
667 #undef __FUNCT__
668 #define __FUNCT__ "MatPtAPNumeric_AIJ_AIJ_wHYPRE"
669 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
670 {
671   Mat                B;
672   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
673   PetscErrorCode     ierr;
674 
675   PetscFunctionBegin;
676   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
677   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
678   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
679   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
680   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
681   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
682   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
683   PetscFunctionReturn(0);
684 }
685 
686 #undef __FUNCT__
687 #define __FUNCT__ "MatPtAPSymbolic_AIJ_AIJ_wHYPRE"
688 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
689 {
690   PetscErrorCode     ierr;
691   PetscFunctionBegin;
692   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
693   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
694   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
695   PetscFunctionReturn(0);
696 }
697 
698 #undef __FUNCT__
699 #define __FUNCT__ "MatPtAPNumeric_AIJ_HYPRE"
/*
   Numeric phase of PtAP where A is AIJ and P is MATHYPRE: A is wrapped
   as a temporary ParCSR, P's existing ParCSR is used directly, and the
   RAP result is merged into C with C's current type.
*/
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* wrap A (no copy), compute P^T A P, release the wrapper */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "MatPtAP_AIJ_HYPRE"
730 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
731 {
732   PetscErrorCode ierr;
733 
734   PetscFunctionBegin;
735   if (scall == MAT_INITIAL_MATRIX) {
736     const char *deft = MATAIJ;
737     char       type[256];
738     PetscBool  flg;
739 
740     ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
741     ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
742     ierr = PetscOptionsEnd();CHKERRQ(ierr);
743     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
744     ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
745     if (flg) {
746       ierr = MatSetType(*C,type);CHKERRQ(ierr);
747     } else {
748       ierr = MatSetType(*C,deft);CHKERRQ(ierr);
749     }
750     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
751     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
752   }
753   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
754   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
755   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
756   PetscFunctionReturn(0);
757 }
758 
759 #undef __FUNCT__
760 #define __FUNCT__ "MatPtAPNumeric_HYPRE_HYPRE"
761 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
762 {
763   Mat                B;
764   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
765   Mat_HYPRE          *hA,*hP;
766   PetscBool          ishypre;
767   HYPRE_Int          type;
768   PetscErrorCode     ierr;
769 
770   PetscFunctionBegin;
771   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
772   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
773   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
774   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
775   hA = (Mat_HYPRE*)A->data;
776   hP = (Mat_HYPRE*)P->data;
777   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
778   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
779   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
780   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
781   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
782   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
783   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
784   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
785   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
789 #undef __FUNCT__
790 #define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
791 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
792 {
793   PetscErrorCode ierr;
794 
795   PetscFunctionBegin;
796   if (scall == MAT_INITIAL_MATRIX) {
797     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
798     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
799     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
800     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
801     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
802   }
803   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
804   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
805   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_ParCSR_MatMatMult"
/* Computes *hAB = hA * hB with hypre's ParCSR multiply. The product is
   allocated by hypre; callers take ownership (they pass it to
   MatCreateFromParCSR with PETSC_OWN_POINTER). */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
825 
826 #undef __FUNCT__
827 #define __FUNCT__ "MatMatMultNumeric_AIJ_AIJ_wHYPRE"
828 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
829 {
830   Mat                D;
831   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
832   PetscErrorCode     ierr;
833 
834   PetscFunctionBegin;
835   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
836   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
837   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
838   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
839   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
840   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
841   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
842   PetscFunctionReturn(0);
843 }
844 
845 #undef __FUNCT__
846 #define __FUNCT__ "MatMatMultSymbolic_AIJ_AIJ_wHYPRE"
847 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
848 {
849   PetscErrorCode ierr;
850 
851   PetscFunctionBegin;
852   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
853   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
854   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
855   PetscFunctionReturn(0);
856 }
857 
858 #undef __FUNCT__
859 #define __FUNCT__ "MatMatMultNumeric_HYPRE_HYPRE"
860 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
861 {
862   Mat                D;
863   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
864   Mat_HYPRE          *hA,*hB;
865   PetscBool          ishypre;
866   HYPRE_Int          type;
867   PetscErrorCode     ierr;
868 
869   PetscFunctionBegin;
870   ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"USING %s\n",__FUNCT__);CHKERRQ(ierr);
871   ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
872   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
873   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
874   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
875   hA = (Mat_HYPRE*)A->data;
876   hB = (Mat_HYPRE*)B->data;
877   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
878   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
879   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
880   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
881   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
882   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
883   ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
884   ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
885   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
886   ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
887   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
888   PetscFunctionReturn(0);
889 }
890 
891 #undef __FUNCT__
892 #define __FUNCT__ "MatMatMult_HYPRE_HYPRE"
893 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
894 {
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"USING %s\n",__FUNCT__);CHKERRQ(ierr);
899   if (scall == MAT_INITIAL_MATRIX) {
900     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
901     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
902     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
903     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
904     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
905   }
906   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
907   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
908   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 
913 #undef __FUNCT__
914 #define __FUNCT__ "MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE"
915 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
916 {
917   Mat                E;
918   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
919   PetscErrorCode     ierr;
920 
921   PetscFunctionBegin;
922   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
923   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
924   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
925   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
926   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
927   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
928   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
929   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
930   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
931   PetscFunctionReturn(0);
932 }
933 
#undef __FUNCT__
#define __FUNCT__ "MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE"
/* Symbolic phase of D = A^T*B*C through hypre: only creates the AIJ
   container; the fill hint is unused since hypre handles the symbolic work. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
945 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
/* y = A^T x for MATHYPRE: defers to the shared kernel with trans=PETSC_TRUE */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
956 
#undef __FUNCT__
#define __FUNCT__ "MatMult_HYPRE"
/* y = A x for MATHYPRE: defers to the shared kernel with trans=PETSC_FALSE */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
967 
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_MultKernel_Private"
/* Shared kernel for MatMult/MatMultTranspose with MATHYPRE.
   The matrix carries two hypre work vectors (hA->x, hA->b) created in
   MatAssemblyEnd_HYPRE. Their data pointers are temporarily swapped with the
   arrays of the PETSc vectors, so no values are copied; the saved hypre
   pointers (sax/say) are restored before returning, and the swap order below
   is therefore significant. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose: output array goes into hA->x, input array into hA->b */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    /* forward: input array goes into hA->x, output array into hA->b */
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1001 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_HYPRE"
/* Destructor for MATHYPRE: releases the hypre work vectors and IJ matrix,
   frees the duplicated communicator, and clears the composed methods that
   were installed in MatCreate_HYPRE. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* when the ParCSR data is owned elsewhere (inner_free false, see
       MatCreateFromParCSR), detach it so HYPRE_IJMatrixDestroy won't free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1026 
#undef __FUNCT__
#define __FUNCT__ "MatSetUp_HYPRE"
/* Default MatSetUp for MATHYPRE: preallocate with default per-row estimates */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1037 
1038 #undef __FUNCT__
1039 #define __FUNCT__ "MatAssemblyEnd_HYPRE"
1040 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1041 {
1042   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1043   Vec                x,b;
1044   PetscErrorCode     ierr;
1045 
1046   PetscFunctionBegin;
1047   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
1048   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
1049   if (hA->x) PetscFunctionReturn(0);
1050   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1051   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1052   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
1053   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
1054   ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
1055   ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
1056   ierr = VecDestroy(&x);CHKERRQ(ierr);
1057   ierr = VecDestroy(&b);CHKERRQ(ierr);
1058   PetscFunctionReturn(0);
1059 }
1060 
1061 #define MATHYPRE_SCRATCH 2048
1062 
1063 #undef __FUNCT__
1064 #define __FUNCT__ "MatSetValues_HYPRE"
1065 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1066 {
1067   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1068   PetscScalar        *vals = (PetscScalar *)v;
1069   PetscScalar        sscr[MATHYPRE_SCRATCH];
1070   HYPRE_Int          cscr[2][MATHYPRE_SCRATCH];
1071   HYPRE_Int          i,nzc;
1072   PetscErrorCode     ierr;
1073 
1074   PetscFunctionBegin;
1075   for (i=0,nzc=0;i<nc;i++) {
1076     if (cols[i] >= 0) {
1077       cscr[0][nzc  ] = cols[i];
1078       cscr[1][nzc++] = i;
1079     }
1080   }
1081   if (!nzc) PetscFunctionReturn(0);
1082 
1083   if (ins == ADD_VALUES) {
1084     for (i=0;i<nr;i++) {
1085       if (rows[i] >= 0 && nzc) {
1086         PetscInt j;
1087         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1088         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1089       }
1090       vals += nc;
1091     }
1092   } else { /* INSERT_VALUES */
1093 #if defined(PETSC_USE_DEBUG)
1094     /* Insert values cannot be used to insert offproc entries */
1095     PetscInt rst,ren;
1096     ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1097     for (i=0;i<nr;i++)
1098       if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead");
1099 #endif
1100     for (i=0;i<nr;i++) {
1101       if (rows[i] >= 0 && nzc) {
1102         PetscInt j;
1103         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1104         PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1105       }
1106       vals += nc;
1107     }
1108   }
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 #undef __FUNCT__
1113 #define __FUNCT__ "MatHYPRESetPreallocation_HYPRE"
1114 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1115 {
1116   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1117   HYPRE_Int          *hdnnz,*honnz;
1118   PetscInt           i,rs,re,cs,ce,bs;
1119   PetscMPIInt        size;
1120   PetscErrorCode     ierr;
1121 
1122   PetscFunctionBegin;
1123   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1124   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1125   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1126   rs   = A->rmap->rstart;
1127   re   = A->rmap->rend;
1128   cs   = A->cmap->rstart;
1129   ce   = A->cmap->rend;
1130   if (!hA->ij) {
1131     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1132     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1133   } else {
1134     HYPRE_Int hrs,hre,hcs,hce;
1135     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1136     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1137     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1138   }
1139   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1140 
1141   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1142   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1143 
1144   if (!dnnz) {
1145     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1146     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1147   } else {
1148     hdnnz = (HYPRE_Int*)dnnz;
1149   }
1150   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1151   if (size > 1) {
1152     if (!onnz) {
1153       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1154       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1155     } else {
1156       honnz = (HYPRE_Int*)onnz;
1157     }
1158     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1159   } else {
1160     honnz = NULL;
1161     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1162   }
1163   if (!dnnz) {
1164     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1165   }
1166   if (!onnz && honnz) {
1167     ierr = PetscFree(honnz);CHKERRQ(ierr);
1168   }
1169   A->preallocated = PETSC_TRUE;
1170 
1171   /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
1172   {
1173     hypre_AuxParCSRMatrix *aux_matrix;
1174     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1175     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1176   }
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 /*@C
1181    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1182 
1183    Collective on Mat
1184 
1185    Input Parameters:
1186 +  A - the matrix
1187 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1188           (same value is used for all local rows)
1189 .  dnnz - array containing the number of nonzeros in the various rows of the
1190           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1192           The size of this array is equal to the number of local rows, i.e 'm'.
1193           For matrices that will be factored, you must leave room for (and set)
1194           the diagonal entry even if it is zero.
1195 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1196           submatrix (same value is used for all local rows).
1197 -  onnz - array containing the number of nonzeros in the various rows of the
1198           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1200           structure. The size of this array is equal to the number
1201           of local rows, i.e 'm'.
1202 
1203    Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1204 
1205    Level: intermediate
1206 
1207 .keywords: matrix, aij, compressed row, sparse, parallel
1208 
1209 .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE
1210 @*/
#undef __FUNCT__
#define __FUNCT__ "MatHYPRESetPreallocation"
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type-specific implementation; PetscTryMethod does not
     error if the matrix type has not composed MatHYPRESetPreallocation_C */
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1223 
1224 /*
1225    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1226 
1227    Collective
1228 
1229    Input Parameters:
1230 +  vparcsr  - the pointer to the hypre_ParCSRMatrix
1231 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1232 -  copymode - PETSc copying options
1233 
1234    Output Parameter:
1235 .  A  - the matrix
1236 
1237    Level: intermediate
1238 
1239 .seealso: MatHYPRE, PetscCopyMode
1240 */
#undef __FUNCT__
#define __FUNCT__ "MatCreateFromParCSR"
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  hypre_ParCSRMatrix    *parcsr;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  /* determine which output type was requested */
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  /* NOTE(review): assumes hypre reports rend < rstart for an empty local
     part; clamping makes rend-rstart+1 equal to 1 below — confirm intended */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object: the IJ matrix is just a shell around the given data */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      /* point hypre at PETSc's column layout; the matrix keeps ownership off */
      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1337 
#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR_HYPRE"
/* MATHYPRE implementation of MatHYPREGetParCSR: returns the internal
   hypre_ParCSRMatrix pointer; the matrix retains ownership of it. */
PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE*            hA = (Mat_HYPRE*)A->data;
  HYPRE_Int             type;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}
1353 
1354 /*
1355    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1356 
1357    Not collective
1358 
   Input Parameter:
.  A  - the MATHYPRE object
1361 
1362    Output Parameter:
1363 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1364 
1365    Level: intermediate
1366 
1367 .seealso: MatHYPRE, PetscCopyMode
1368 */
#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR"
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* PetscUseMethod errors if the type does not compose MatHYPREGetParCSR_C */
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1381 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_HYPRE"
/* Constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   type's operation table, duplicates the communicator handed to hypre, and
   composes the conversion/PtAP/preallocation query methods (cleared again in
   MatDestroy_HYPRE). */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE; /* by default the IJMatrix owns its ParCSR data */

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  B->ops->mult          = MatMult_HYPRE;
  B->ops->multtranspose = MatMultTranspose_HYPRE;
  B->ops->setup         = MatSetUp_HYPRE;
  B->ops->destroy       = MatDestroy_HYPRE;
  B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
  B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult       = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues     = MatSetValues_HYPRE;

  /* private communicator duplicate, passed to HYPRE_IJMatrixCreate later */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1416 
1417 #undef __FUNCT__
1418 #define __FUNCT__ "hypre_array_destroy"
1419 static PetscErrorCode hypre_array_destroy(void *ptr)
1420 {
1421    PetscFunctionBegin;
1422    hypre_TFree(ptr);
1423    PetscFunctionReturn(0);
1424 }
1425 
#if 0
/*
    Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

    This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
    which will corrupt the PETSc data structure if we did this. Need a work around to this problem.
*/
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixLink"
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  /* disable hypre's auxiliary storage since the intent was to alias the AIJ arrays directly */
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
1473