xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 2cf14000413ceaa9c8c88da47cb48e334299226b)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscpkg_version.h>
7 #include <petscmathypre.h>
8 #include <petsc/private/matimpl.h>
9 #include <../src/mat/impls/hypre/mhypre.h>
10 #include <../src/mat/impls/aij/mpi/mpiaij.h>
11 #include <../src/vec/vec/impls/hypre/vhyp.h>
12 #include <HYPRE.h>
13 #include <HYPRE_utilities.h>
14 #include <_hypre_parcsr_ls.h>
15 #include <_hypre_sstruct_ls.h>
16 
17 /* from version 2.16 on, HYPRE_BigInt is 64 bit for 64bit installations
18    and 32 bit for 32bit installations -> not the best name for a variable */
19 #if PETSC_PKG_HYPRE_VERSION_LT(2,16,0)
20 typedef PetscInt HYPRE_BigInt
21 #endif
22 
23 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
24 
25 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
26 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
29 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,PetscScalar,Vec,PetscScalar,Vec,PetscBool);
30 static PetscErrorCode hypre_array_destroy(void*);
31 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
32 
/*
   Preallocates the HYPRE IJ matrix ij from the row lengths of the local
   diagonal block A_d and off-diagonal block A_o of a PETSc matrix.
   Either block may be NULL (A_o is NULL for sequential matrices).
   If MatGetRowIJ does not hand back the row offsets for the diagonal
   block (done_d false), preallocation is skipped entirely.
*/
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscErrorCode ierr;
  PetscInt       i,n_d,n_o;
  const PetscInt *ia_d,*ia_o;
  PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
  HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
    if (done_d) {
      ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        /* row length = difference of consecutive CSR row offsets */
        nnz_d[i] = ia_d[i+1] - ia_d[i];
      }
    }
    ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
    if (done_o) {
      ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_o; i++) {
        nnz_o[i] = ia_o[i+1] - ia_o[i];
      }
    }
    ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
  }
  if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part: preallocate zero off-diagonal entries per row */
      ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_o[i] = 0;
      }
    }
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
    { /* If we don't do this, the columns of the matrix will be all zeros! */
      /* destroy and recreate hypre's auxiliary translator so that
         SetDiagOffdSizes takes effect, then force the aux matrix on */
      hypre_AuxParCSRMatrix *aux_matrix;
      aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixDestroy(aux_matrix);
      hypre_IJMatrixTranslator(ij) = NULL;
      PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
      aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
    }
#else
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
#endif
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
87 
/*
   Creates the HYPRE IJ matrix hA->ij with the parallel layout of the PETSc
   matrix A, and preallocates it when A is one of the recognized AIJ/BAIJ
   types.  For any other type the IJ matrix is created but left
   unpreallocated.
*/
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* HYPRE_IJMatrixCreate takes inclusive upper bounds, hence rend-1/cend-1 */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      /* sequential: no off-diagonal block */
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  PetscFunctionReturn(0);
}
131 
132 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
133 {
134   PetscErrorCode    ierr;
135   PetscInt          i,rstart,rend,ncols,nr,nc;
136   const PetscScalar *values;
137   const PetscInt    *cols;
138   PetscBool         flg;
139 
140   PetscFunctionBegin;
141   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
142   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
143   if (flg && nr == nc) {
144     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
145     PetscFunctionReturn(0);
146   }
147   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
148   if (flg) {
149     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
150     PetscFunctionReturn(0);
151   }
152 
153   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
154   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
155   for (i=rstart; i<rend; i++) {
156     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
157     if (ncols) {
158       HYPRE_Int nc = (HYPRE_Int)ncols;
159 
160       if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
161       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,values));
162     }
163     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
164   }
165   PetscFunctionReturn(0);
166 }
167 
/*
   Fast copy of a SeqAIJ matrix into a HYPRE IJ matrix: copies the CSR
   arrays (i, j, data) directly into hypre's diagonal CSR block instead of
   going through HYPRE_IJMatrixSetValues.  Only valid for HYPRE_PARCSR
   object type.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  /* when PetscInt and HYPRE_Int have the same width we can memcpy the
     index arrays, otherwise we convert element by element */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr);
  } else {
    PetscInt i;

    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  /* values were written behind hypre's back: tell hypre not to rebuild
     from its auxiliary (assembly) matrix */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
202 
203 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
204 {
205   PetscErrorCode        ierr;
206   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
207   Mat_SeqAIJ            *pdiag,*poffd;
208   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
209   HYPRE_Int             *hjj,type;
210   hypre_ParCSRMatrix    *par_matrix;
211   hypre_AuxParCSRMatrix *aux_matrix;
212   hypre_CSRMatrix       *hdiag,*hoffd;
213   PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
214 
215   PetscFunctionBegin;
216   pdiag = (Mat_SeqAIJ*) pA->A->data;
217   poffd = (Mat_SeqAIJ*) pA->B->data;
218   /* cstart is only valid for square MPIAIJ layed out in the usual way */
219   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
220 
221   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
222   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
223   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
224   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
225   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
226   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
227 
228   /*
229        this is the Hack part where we monkey directly with the hypre datastructures
230   */
231   if (sameint) {
232     ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
233   } else {
234     for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
235   }
236   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
237   hjj = hdiag->j;
238   pjj = pdiag->j;
239 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
240   for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
241 #else
242   for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
243 #endif
244   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
245   if (sameint) {
246     ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
247   } else {
248     for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
249   }
250 
251   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
252      If we hacked a hypre a bit more we might be able to avoid this step */
253 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
254   PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
255   jj  = (PetscInt*) hoffd->big_j;
256 #else
257   jj  = (PetscInt*) hoffd->j;
258 #endif
259   pjj = poffd->j;
260   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
261 
262   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
263 
264   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
265   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
266   PetscFunctionReturn(0);
267 }
268 
/*
   Converts a MATHYPRE matrix A into a MATIS matrix B.  The local MATIS
   matrix is a SeqAIJ obtained by merging hypre's diagonal and off-diagonal
   CSR blocks row by row; off-diagonal columns are appended after the dc
   diagonal columns in the local numbering.  Supports MAT_INITIAL_MATRIX,
   MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* extract sizes, CSR arrays and data from both hypre blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz now counts both blocks */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns 0..dc-1 are the diagonal columns, dc..dc+oc-1 the
       off-diagonal ones mapped through col_map_offd */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse path: write directly into the existing local matrix's CSR */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk both blocks' CSR arrays in step, emitting
     each row's diagonal entries followed by its (shifted) off-diagonal
     entries, then sort that row's column indices with its values */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad trailing empty rows so that the row-offset array is complete */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the arrays we allocated above */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
375 
/*
   Converts an AIJ matrix A into a MATHYPRE matrix B.  Supports
   MAT_INITIAL_MATRIX (creates *B), MAT_REUSE_MATRIX (destroys and rebuilds
   the existing IJ matrix inside *B), and MAT_INPLACE_MATRIX (replaces A's
   header with the new HYPRE matrix).
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    /* M is the freshly created matrix; for MAT_INPLACE_MATRIX it is
       swapped into A by MatHeaderReplace below */
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)(M->data);
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
  }
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
409 
/*
   Converts a MATHYPRE matrix A to (Seq|MPI)AIJ.  The hypre CSR arrays are
   copied (or, for MAT_INPLACE_MATRIX with matching integer sizes, stolen)
   into PETSc AIJ storage; column indices within each row are sorted since
   hypre does not guarantee AIJ ordering.  Supports MAT_INITIAL_MATRIX,
   MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  /* when sizes match we can memcpy/steal the index arrays directly */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the target diagonal-block arrays dii/djj/da according to reuse mode */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* steal hypre's index arrays; they are detached from hypre below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  }
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort each row's column indices together with its values (AIJ expects sorted rows) */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* same dance for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    }
    /* off-diagonal columns are stored compacted; map back to global through col_map_offd */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the arrays to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the arrays to the new matrix */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when !sameint only the value arrays (da,oa) were stolen from hypre */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      /* attach a container so that the stolen hypre arrays are freed with
         hypre_TFree when the matrix is destroyed */
      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
621 
622 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
623 {
624   hypre_ParCSRMatrix *tA;
625   hypre_CSRMatrix    *hdiag,*hoffd;
626   Mat_SeqAIJ         *diag,*offd;
627   PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
628   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
629   PetscBool          ismpiaij,isseqaij;
630   PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
631   PetscErrorCode     ierr;
632 
633   PetscFunctionBegin;
634   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
635   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
636   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
637   if (ismpiaij) {
638     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
639 
640     diag   = (Mat_SeqAIJ*)a->A->data;
641     offd   = (Mat_SeqAIJ*)a->B->data;
642     garray = a->garray;
643     noffd  = a->B->cmap->N;
644     dnnz   = diag->nz;
645     onnz   = offd->nz;
646   } else {
647     diag   = (Mat_SeqAIJ*)A->data;
648     offd   = NULL;
649     garray = NULL;
650     noffd  = 0;
651     dnnz   = diag->nz;
652     onnz   = 0;
653   }
654 
655   /* create a temporary ParCSR */
656   if (HYPRE_AssumedPartitionCheck()) {
657     PetscMPIInt myid;
658 
659     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
660     row_starts = A->rmap->range + myid;
661     col_starts = A->cmap->range + myid;
662   } else {
663     row_starts = A->rmap->range;
664     col_starts = A->cmap->range;
665   }
666   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
667   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
668   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
669 
670   /* set diagonal part */
671   hdiag = hypre_ParCSRMatrixDiag(tA);
672   if (sameint) { /* reuse CSR pointers */
673     hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
674     hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
675   } else { /* malloc CSR pointers */
676     HYPRE_Int *hi,*hj;
677 
678     ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr);
679     for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
680     for (i = 0; i < dnnz; i++)         hj[i] = (HYPRE_Int)(diag->j[i]);
681     hypre_CSRMatrixI(hdiag) = hi;
682     hypre_CSRMatrixJ(hdiag) = hj;
683   }
684   hypre_CSRMatrixData(hdiag)        = diag->a;
685   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
686   hypre_CSRMatrixSetRownnz(hdiag);
687   hypre_CSRMatrixSetDataOwner(hdiag,0);
688 
689   /* set offdiagonal part */
690   hoffd = hypre_ParCSRMatrixOffd(tA);
691   if (offd) {
692     if (sameint) { /* reuse CSR pointers */
693       hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
694       hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
695     } else { /* malloc CSR pointers */
696       HYPRE_Int *hi,*hj;
697 
698       ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr);
699       for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
700       for (i = 0; i < onnz; i++)         hj[i] = (HYPRE_Int)(offd->j[i]);
701       hypre_CSRMatrixI(hoffd) = hi;
702       hypre_CSRMatrixJ(hoffd) = hj;
703     }
704     hypre_CSRMatrixData(hoffd)        = offd->a;
705     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
706     hypre_CSRMatrixSetRownnz(hoffd);
707     hypre_CSRMatrixSetDataOwner(hoffd,0);
708     hypre_ParCSRMatrixSetNumNonzeros(tA);
709     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
710   }
711   *hA = tA;
712   PetscFunctionReturn(0);
713 }
714 
/*
   Releases a temporary hypre_ParCSRMatrix obtained from
   MatAIJGetParCSR_Private: frees the converted index arrays that PETSc
   allocated when PetscInt != HYPRE_Int, detaches all borrowed pointers so
   hypre does not free PETSc-owned memory, and destroys the ParCSR.
*/
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    /* NOTE(review): guarded here but dereferenced unconditionally below --
       presumably ParCSRMatrixOffd is never NULL for matrices built by
       MatAIJGetParCSR_Private; confirm against hypre */
    if (hoffd) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
749 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember the current ownership flags so they can be restored after the hypre call */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}
771 
772 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
773 {
774   Mat                B;
775   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
776   PetscErrorCode     ierr;
777 
778   PetscFunctionBegin;
779   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
780   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
781   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
782   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
783   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
784   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
785   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
/* Symbolic phase of P^T A P through hypre: no symbolic work is done here,
   it only creates C and installs the numeric callback */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}
799 
/* Numeric P^T A P where A is AIJ and P is MATHYPRE; the output type follows C's type */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* P must be MATHYPRE holding a HYPRE_PARCSR object */
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* temporarily view A's AIJ data as a hypre ParCSR matrix */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
827 
828 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
829 {
830   PetscErrorCode ierr;
831 
832   PetscFunctionBegin;
833   if (scall == MAT_INITIAL_MATRIX) {
834     const char *deft = MATAIJ;
835     char       type[256];
836     PetscBool  flg;
837 
838     ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
839     ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
840     ierr = PetscOptionsEnd();CHKERRQ(ierr);
841     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
842     ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
843     if (flg) {
844       ierr = MatSetType(*C,type);CHKERRQ(ierr);
845     } else {
846       ierr = MatSetType(*C,deft);CHKERRQ(ierr);
847     }
848     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
849     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
850   }
851   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
852   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
853   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
854   PetscFunctionReturn(0);
855 }
856 
857 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
858 {
859   Mat                B;
860   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
861   Mat_HYPRE          *hA,*hP;
862   PetscBool          ishypre;
863   HYPRE_Int          type;
864   PetscErrorCode     ierr;
865 
866   PetscFunctionBegin;
867   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
868   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
869   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
870   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
871   hA = (Mat_HYPRE*)A->data;
872   hP = (Mat_HYPRE*)P->data;
873   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
874   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
875   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
876   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
877   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
878   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
879   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
880   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
881   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
886 {
887   PetscErrorCode ierr;
888 
889   PetscFunctionBegin;
890   if (scall == MAT_INITIAL_MATRIX) {
891     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
892     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
893     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
894     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
895     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
896   }
897   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
898   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
899   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
900   PetscFunctionReturn(0);
901 }
902 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for hypre_ParMatmul to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* manual stack push/pop: hypre_ParMatmul returns the product, not an error code,
     so PetscStackCallStandard cannot be used here */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
917 
918 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
919 {
920   Mat                D;
921   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
922   PetscErrorCode     ierr;
923 
924   PetscFunctionBegin;
925   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
926   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
927   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
928   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
929   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
930   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
931   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
/* Symbolic phase of C = A*B through hypre: no symbolic work is done here,
   it only creates C and installs the numeric callback */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}
945 
/* Numeric C = A*B where both operands are MATHYPRE; the result is MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE and hold HYPRE_PARCSR objects */
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* reinstall the numeric callback on the replaced header */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}
975 
976 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
977 {
978   PetscErrorCode ierr;
979 
980   PetscFunctionBegin;
981   if (scall == MAT_INITIAL_MATRIX) {
982     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
983     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
984     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
985     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
986     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
987   }
988   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
989   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
990   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
991   PetscFunctionReturn(0);
992 }
993 
994 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
995 {
996   Mat                E;
997   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
998   PetscErrorCode     ierr;
999 
1000   PetscFunctionBegin;
1001   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
1002   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
1003   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
1004   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
1005   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
1006   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
1007   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
1008   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
1009   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
1010   PetscFunctionReturn(0);
1011 }
1012 
/* Symbolic phase of D = A^T * B * C through hypre: only creates the output matrix */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1022 
/* y = A^T x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1031 
/* y = A x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1040 
1041 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1042 {
1043   PetscErrorCode ierr;
1044 
1045   PetscFunctionBegin;
1046   if (y != z) {
1047     ierr = VecCopy(y,z);CHKERRQ(ierr);
1048   }
1049   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
1050   PetscFunctionReturn(0);
1051 }
1052 
1053 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1054 {
1055   PetscErrorCode ierr;
1056 
1057   PetscFunctionBegin;
1058   if (y != z) {
1059     ierr = VecCopy(y,z);CHKERRQ(ierr);
1060   }
1061   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
1062   PetscFunctionReturn(0);
1063 }
1064 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, PetscScalar a, Vec x, PetscScalar b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say; /* sax/say save hypre's original data pointers */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* hA->x and hA->b are cached work IJVectors (created in MatAssemblyEnd_HYPRE) */
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* for the transpose, input/output roles of the work vectors are swapped:
       hA->x (column layout) receives the output array, hA->b (row layout) the input */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    /* restore hypre's original pointers before releasing the PETSc arrays */
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1097 
/* Destroys a MATHYPRE matrix: work vectors, IJ matrix, duplicated communicator,
   stash, scratch array, and the composed methods installed at creation time */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if we do not own the inner ParCSR object, detach it so hypre does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  /* scratch space handed out by MatGetArray_HYPRE */
  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1126 
/* Default setup: fall back to preallocation with default row lengths */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1135 
/* Finalizes assembly: drains the stash of off-process entries, assembles the
   IJ matrix, recreates hypre's auxiliary translator (destroyed by the assembly),
   and lazily creates the work vectors used by the MatMult kernels */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  Vec                x,b;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  /* insert the values that other ranks stashed for rows we own */
  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break; /* no more messages */

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitalize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* create the cached work vectors once; the MatMult kernels swap user arrays
     into them, so they are created with a NULL array */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1197 
1198 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1199 {
1200   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1201   PetscErrorCode     ierr;
1202 
1203   PetscFunctionBegin;
1204   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1205 
1206   if (hA->size >= size) *array = hA->array;
1207   else {
1208     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1209     hA->size = size;
1210     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1211     *array = hA->array;
1212   }
1213 
1214   hA->available = PETSC_FALSE;
1215   PetscFunctionReturn(0);
1216 }
1217 
1218 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1219 {
1220   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1221 
1222   PetscFunctionBegin;
1223   *array = NULL;
1224   hA->available = PETSC_TRUE;
1225   PetscFunctionReturn(0);
1226 }
1227 
/* MatSetValues for MATHYPRE: inserts/adds a logically dense nr x nc block of
   values. Negative row/column indices are ignored. With INSERT_VALUES,
   off-process rows are stashed and applied in MatAssemblyEnd_HYPRE; with
   ADD_VALUES they are passed straight to hypre, which handles the exchange. */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscScalar        *vals = (PetscScalar *)v;
  PetscScalar        *sscr;    /* scratch for one row of (compressed) values */
  PetscInt           *cscr[2]; /* [0]: kept column indices, [1]: their positions in cols[] */
  PetscInt           i,nzc;    /* nzc = number of columns with non-negative index */
  void               *array = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* carve the scratch buffer into two integer arrays of length nc and a scalar area */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
  /* compress away negative column indices, remembering original positions */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* nothing to insert */
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        /* guard against HYPRE_Int being narrower than PetscInt */
        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc; /* advance to the next row of the dense input block */
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1289 
1290 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1291 {
1292   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
1293   HYPRE_Int      *hdnnz,*honnz;
1294   PetscInt       i,rs,re,cs,ce,bs;
1295   PetscMPIInt    size;
1296   PetscErrorCode ierr;
1297 
1298   PetscFunctionBegin;
1299   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1300   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1301   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1302   rs   = A->rmap->rstart;
1303   re   = A->rmap->rend;
1304   cs   = A->cmap->rstart;
1305   ce   = A->cmap->rend;
1306   if (!hA->ij) {
1307     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1308     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1309   } else {
1310     HYPRE_BigInt hrs,hre,hcs,hce;
1311     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1312     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
1313     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
1314   }
1315   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1316   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1317 
1318   if (!dnnz) {
1319     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1320     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1321   } else {
1322     hdnnz = (HYPRE_Int*)dnnz;
1323   }
1324   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1325   if (size > 1) {
1326     hypre_AuxParCSRMatrix *aux_matrix;
1327     if (!onnz) {
1328       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1329       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1330     } else {
1331       honnz = (HYPRE_Int*)onnz;
1332     }
1333     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1334        they assume the user will input the entire row values, properly sorted
1335        In PETSc, we don't make such an assumption, and we instead set this flag to 1
1336        Also, to avoid possible memory leaks, we destroy and recreate the translator
1337        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1338        the IJ matrix for us */
1339     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1340     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1341     hypre_IJMatrixTranslator(hA->ij) = NULL;
1342     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1343     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1344     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1345   } else {
1346     honnz = NULL;
1347     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1348   }
1349 
1350   /* reset assembled flag and call the initialize method */
1351   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1352   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1353   if (!dnnz) {
1354     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1355   }
1356   if (!onnz && honnz) {
1357     ierr = PetscFree(honnz);CHKERRQ(ierr);
1358   }
1359 
1360   /* Match AIJ logic */
1361   A->preallocated = PETSC_TRUE;
1362   A->assembled    = PETSC_FALSE;
1363   PetscFunctionReturn(0);
1364 }
1365 
1366 /*@C
1367    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1368 
1369    Collective on Mat
1370 
1371    Input Parameters:
1372 +  A - the matrix
1373 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1374           (same value is used for all local rows)
1375 .  dnnz - array containing the number of nonzeros in the various rows of the
1376           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1378           The size of this array is equal to the number of local rows, i.e 'm'.
1379           For matrices that will be factored, you must leave room for (and set)
1380           the diagonal entry even if it is zero.
1381 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1382           submatrix (same value is used for all local rows).
1383 -  onnz - array containing the number of nonzeros in the various rows of the
1384           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1386           structure. The size of this array is equal to the number
1387           of local rows, i.e 'm'.
1388 
1389    Notes:
1390     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1391 
1392    Level: intermediate
1393 
1394 .keywords: matrix, aij, compressed row, sparse, parallel
1395 
1396 .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1397 @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type-specific implementation, if composed (no-op otherwise) */
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1408 
1409 /*
1410    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1411 
1412    Collective
1413 
1414    Input Parameters:
1415 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1416 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1417 -  copymode - PETSc copying options
1418 
1419    Output Parameter:
1420 .  A  - the matrix
1421 
1422    Level: intermediate
1423 
1424 .seealso: MatHYPRE, PetscCopyMode
1425 */
/* Wraps (or copies) a hypre_ParCSRMatrix into a PETSc Mat of the requested type.
   mtype must be one of MATAIJ/MATSEQAIJ/MATMPIAIJ, MATIS, or MATHYPRE; copymode
   controls who owns parcsr afterwards (see PetscCopyMode). A MATHYPRE matrix T
   is always built first; AIJ/IS results are obtained by converting T. */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  /* decide the output flavor from the requested type name */
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix: local ownership ranges and global sizes */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns: hypre reports last < first in that case */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ranges are half-open [rstart,rend) */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix (inclusive index ranges, hence rend-1/cend-1) */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: for MATHYPRE with PETSC_COPY_VALUES we
     deep-copy into a clone we own and then proceed as PETSC_OWN_POINTER.
     NOTE(review): the explicit memcpy below suggests CompleteClone replicates the
     pattern but not the values -- confirm against the hypre sources. */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag)*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd)*sizeof(PetscScalar));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object directly inside the IJ wrapper */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag so hypre treats the data as final */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      /* alias the PETSc layout range array; parcsr does not own it, so hypre
         will never attempt to free it */
      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer when the caller retains ownership */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer: the AIJ copy owns its own data */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1538 
1539 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1540 {
1541   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1542   HYPRE_Int type;
1543 
1544   PetscFunctionBegin;
1545   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1546   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1547   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1548   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1549   PetscFunctionReturn(0);
1550 }
1551 
1552 /*
1553    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1554 
1555    Not collective
1556 
   Input Parameter:
.  A  - the MATHYPRE object
1559 
1560    Output Parameter:
1561 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1562 
1563    Level: intermediate
1564 
1565 .seealso: MatHYPRE, PetscCopyMode
1566 */
1567 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1568 {
1569   PetscErrorCode ierr;
1570 
1571   PetscFunctionBegin;
1572   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1573   PetscValidType(A,1);
1574   ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
1575   PetscFunctionReturn(0);
1576 }
1577 
1578 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1579 {
1580   hypre_ParCSRMatrix *parcsr;
1581   hypre_CSRMatrix    *ha;
1582   PetscInt           rst;
1583   PetscErrorCode     ierr;
1584 
1585   PetscFunctionBegin;
1586   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1587   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1588   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1589   if (missing) *missing = PETSC_FALSE;
1590   if (dd) *dd = -1;
1591   ha = hypre_ParCSRMatrixDiag(parcsr);
1592   if (ha) {
1593     PetscInt  size,i;
1594     HYPRE_Int *ii,*jj;
1595 
1596     size = hypre_CSRMatrixNumRows(ha);
1597     ii   = hypre_CSRMatrixI(ha);
1598     jj   = hypre_CSRMatrixJ(ha);
1599     for (i = 0; i < size; i++) {
1600       PetscInt  j;
1601       PetscBool found = PETSC_FALSE;
1602 
1603       for (j = ii[i]; j < ii[i+1] && !found; j++)
1604         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1605 
1606       if (!found) {
1607         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1608         if (missing) *missing = PETSC_TRUE;
1609         if (dd) *dd = i+rst;
1610         PetscFunctionReturn(0);
1611       }
1612     }
1613     if (!size) {
1614       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1615       if (missing) *missing = PETSC_TRUE;
1616       if (dd) *dd = rst;
1617     }
1618   } else {
1619     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1620     if (missing) *missing = PETSC_TRUE;
1621     if (dd) *dd = rst;
1622   }
1623   PetscFunctionReturn(0);
1624 }
1625 
1626 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1627 {
1628   hypre_ParCSRMatrix *parcsr;
1629   hypre_CSRMatrix    *ha;
1630   PetscErrorCode     ierr;
1631 
1632   PetscFunctionBegin;
1633   ierr  = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1634   /* diagonal part */
1635   ha = hypre_ParCSRMatrixDiag(parcsr);
1636   if (ha) {
1637     PetscInt    size,i;
1638     HYPRE_Int   *ii;
1639     PetscScalar *a;
1640 
1641     size = hypre_CSRMatrixNumRows(ha);
1642     a    = hypre_CSRMatrixData(ha);
1643     ii   = hypre_CSRMatrixI(ha);
1644     for (i = 0; i < ii[size]; i++) a[i] *= s;
1645   }
1646   /* offdiagonal part */
1647   ha = hypre_ParCSRMatrixOffd(parcsr);
1648   if (ha) {
1649     PetscInt    size,i;
1650     HYPRE_Int   *ii;
1651     PetscScalar *a;
1652 
1653     size = hypre_CSRMatrixNumRows(ha);
1654     a    = hypre_CSRMatrixData(ha);
1655     ii   = hypre_CSRMatrixI(ha);
1656     for (i = 0; i < ii[size]; i++) a[i] *= s;
1657   }
1658   PetscFunctionReturn(0);
1659 }
1660 
1661 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1662 {
1663   hypre_ParCSRMatrix *parcsr;
1664   HYPRE_Int          *lrows;
1665   PetscInt           rst,ren,i;
1666   PetscErrorCode     ierr;
1667 
1668   PetscFunctionBegin;
1669   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1670   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1671   ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
1672   ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1673   for (i=0;i<numRows;i++) {
1674     if (rows[i] < rst || rows[i] >= ren)
1675       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1676     lrows[i] = rows[i] - rst;
1677   }
1678   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1679   ierr = PetscFree(lrows);CHKERRQ(ierr);
1680   PetscFunctionReturn(0);
1681 }
1682 
1683 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1684 {
1685   PetscErrorCode      ierr;
1686 
1687   PetscFunctionBegin;
1688   if (ha) {
1689     HYPRE_Int     *ii, size;
1690     HYPRE_Complex *a;
1691 
1692     size = hypre_CSRMatrixNumRows(ha);
1693     a    = hypre_CSRMatrixData(ha);
1694     ii   = hypre_CSRMatrixI(ha);
1695 
1696     if (a) { ierr = PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex));CHKERRQ(ierr); }
1697   }
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1702 {
1703   hypre_ParCSRMatrix  *parcsr;
1704   PetscErrorCode      ierr;
1705 
1706   PetscFunctionBegin;
1707   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1708   /* diagonal part */
1709   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
1710   /* off-diagonal part */
1711   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
1712   PetscFunctionReturn(0);
1713 }
1714 
1715 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
1716 {
1717   PetscInt        ii, jj, ibeg, iend, irow;
1718   PetscInt        *i, *j;
1719   PetscScalar     *a;
1720 
1721   PetscFunctionBegin;
1722   if (!hA) PetscFunctionReturn(0);
1723 
1724   i = (PetscInt*) hypre_CSRMatrixI(hA);
1725   j = (PetscInt*) hypre_CSRMatrixJ(hA);
1726   a = hypre_CSRMatrixData(hA);
1727 
1728   for (ii = 0; ii < N; ii++) {
1729     irow = rows[ii];
1730     ibeg = i[irow];
1731     iend = i[irow+1];
1732     for (jj = ibeg; jj < iend; jj++)
1733       if (j[jj] == irow) a[jj] = diag;
1734       else a[jj] = 0.0;
1735    }
1736    PetscFunctionReturn(0);
1737 }
1738 
1739 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1740 {
1741   hypre_ParCSRMatrix  *parcsr;
1742   PetscInt            *lrows,len;
1743   PetscErrorCode      ierr;
1744 
1745   PetscFunctionBegin;
1746   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
1747   /* retrieve the internal matrix */
1748   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1749   /* get locally owned rows */
1750   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1751   /* zero diagonal part */
1752   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);CHKERRQ(ierr);
1753   /* zero off-diagonal part */
1754   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);
1755 
1756   ierr = PetscFree(lrows);CHKERRQ(ierr);
1757   PetscFunctionReturn(0);
1758 }
1759 
1760 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1761 {
1762   PetscErrorCode ierr;
1763 
1764   PetscFunctionBegin;
1765   if (mat->nooffprocentries) PetscFunctionReturn(0);
1766 
1767   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
1768   PetscFunctionReturn(0);
1769 }
1770 
1771 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1772 {
1773   hypre_ParCSRMatrix  *parcsr;
1774   HYPRE_Int           hnz;
1775   PetscErrorCode      ierr;
1776 
1777   PetscFunctionBegin;
1778   /* retrieve the internal matrix */
1779   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1780   /* call HYPRE API */
1781   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,v));
1782   if (nz) *nz = (PetscInt)hnz;
1783   PetscFunctionReturn(0);
1784 }
1785 
1786 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1787 {
1788   hypre_ParCSRMatrix  *parcsr;
1789   HYPRE_Int           hnz;
1790   PetscErrorCode      ierr;
1791 
1792   PetscFunctionBegin;
1793   /* retrieve the internal matrix */
1794   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1795   /* call HYPRE API */
1796   hnz = nz ? (HYPRE_Int)(*nz) : 0;
1797   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,v));
1798   PetscFunctionReturn(0);
1799 }
1800 
1801 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1802 {
1803   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1804   PetscInt  i;
1805 
1806   PetscFunctionBegin;
1807   if (!m || !n) PetscFunctionReturn(0);
1808   /* Ignore negative row indices
1809    * And negative column indices should be automatically ignored in hypre
1810    * */
1811   for (i=0; i<m; i++) {
1812     if (idxm[i] >= 0) {
1813       HYPRE_Int hn = (HYPRE_Int)n;
1814       PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,&v[i*n]));
1815     }
1816   }
1817   PetscFunctionReturn(0);
1818 }
1819 
1820 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1821 {
1822   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1823 
1824   PetscFunctionBegin;
1825   switch (op) {
1826   case MAT_NO_OFF_PROC_ENTRIES:
1827     if (flg) {
1828       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1829     }
1830     break;
1831   default:
1832     break;
1833   }
1834   PetscFunctionReturn(0);
1835 }
1836 
1837 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1838 {
1839   hypre_ParCSRMatrix *parcsr;
1840   PetscErrorCode     ierr;
1841   Mat                B;
1842   PetscViewerFormat  format;
1843   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;
1844 
1845   PetscFunctionBegin;
1846   ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
1847   if (format != PETSC_VIEWER_NATIVE) {
1848     ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1849     ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
1850     ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
1851     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr);
1852     ierr = (*mview)(B,view);CHKERRQ(ierr);
1853     ierr = MatDestroy(&B);CHKERRQ(ierr);
1854   } else {
1855     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1856     PetscMPIInt size;
1857     PetscBool   isascii;
1858     const char *filename;
1859 
1860     /* HYPRE uses only text files */
1861     ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
1862     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1863     ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
1864     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1865     ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr);
1866     if (size > 1) {
1867       ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
1868     } else {
1869       ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
1870     }
1871   }
1872   PetscFunctionReturn(0);
1873 }
1874 
1875 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1876 {
1877   hypre_ParCSRMatrix *parcsr;
1878   PetscErrorCode     ierr;
1879   PetscCopyMode      cpmode;
1880 
1881   PetscFunctionBegin;
1882   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1883   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1884     parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
1885     cpmode = PETSC_OWN_POINTER;
1886   } else {
1887     cpmode = PETSC_COPY_VALUES;
1888   }
1889   ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
1890   PetscFunctionReturn(0);
1891 }
1892 
1893 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
1894 {
1895   hypre_ParCSRMatrix *acsr,*bcsr;
1896   PetscErrorCode     ierr;
1897 
1898   PetscFunctionBegin;
1899   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
1900     ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
1901     ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
1902     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
1903     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1904     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1905   } else {
1906     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1907   }
1908   PetscFunctionReturn(0);
1909 }
1910 
/* Extracts the diagonal of a MATHYPRE matrix into d.
   Requires congruent row/column layouts; in debug builds it also errors if a
   local diagonal entry is missing (a prerequisite of the extraction below). */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  PetscScalar        *a,*data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt           i;
  PetscBool          cong;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
#if defined(PETSC_USE_DEBUG)
  {
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
#endif
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    ierr = VecGetArray(d,&a);CHKERRQ(ierr);
    /* diag is the CSR row-pointer array, so data[diag[i]] is the FIRST stored
       entry of local row i. NOTE(review): this assumes hypre stores the
       diagonal coefficient first in each row of the diag block -- confirm
       against the hypre CSR conventions. */
    diag = hypre_CSRMatrixI(dmat);
    data = (PetscScalar*)(hypre_CSRMatrixData(dmat));
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1942 
1943 #include <petscblaslapack.h>
1944 
1945 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
1946 {
1947   PetscErrorCode ierr;
1948 
1949   PetscFunctionBegin;
1950   if (str == SAME_NONZERO_PATTERN) {
1951     hypre_ParCSRMatrix *x,*y;
1952     hypre_CSRMatrix    *xloc,*yloc;
1953     PetscInt           xnnz,ynnz;
1954     PetscScalar        *xarr,*yarr;
1955     PetscBLASInt       one=1,bnz;
1956 
1957     ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
1958     ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);
1959 
1960     /* diagonal block */
1961     xloc = hypre_ParCSRMatrixDiag(x);
1962     yloc = hypre_ParCSRMatrixDiag(y);
1963     xnnz = 0;
1964     ynnz = 0;
1965     xarr = NULL;
1966     yarr = NULL;
1967     if (xloc) {
1968       xarr = (PetscScalar*)(hypre_CSRMatrixData(xloc));
1969       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1970     }
1971     if (yloc) {
1972       yarr = (PetscScalar*)(hypre_CSRMatrixData(yloc));
1973       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1974     }
1975     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
1976     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
1977     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,xarr,&one,yarr,&one));
1978 
1979     /* off-diagonal block */
1980     xloc = hypre_ParCSRMatrixOffd(x);
1981     yloc = hypre_ParCSRMatrixOffd(y);
1982     xnnz = 0;
1983     ynnz = 0;
1984     xarr = NULL;
1985     yarr = NULL;
1986     if (xloc) {
1987       xarr = (PetscScalar*)(hypre_CSRMatrixData(xloc));
1988       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1989     }
1990     if (yloc) {
1991       yarr = (PetscScalar*)(hypre_CSRMatrixData(yloc));
1992       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1993     }
1994     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
1995     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
1996     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,xarr,&one,yarr,&one));
1997   } else if (str == SUBSET_NONZERO_PATTERN) {
1998     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1999   } else {
2000     Mat B;
2001 
2002     ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
2003     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2004     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2005   }
2006   PetscFunctionReturn(0);
2007 }
2008 
2009 /*MC
2010    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2011           based on the hypre IJ interface.
2012 
2013    Level: intermediate
2014 
2015 .seealso: MatCreate()
2016 M*/
2017 
/* Type constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   operation table, sets up the value stash for off-process insertions, and
   composes the conversion/product/query methods looked up by name elsewhere. */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;  /* by default we own the hypre data and free it */
  hB->available  = PETSC_TRUE;
  hB->size       = 0;
  hB->array      = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* start from a clean op table and fill in the supported operations */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult             = MatMult_HYPRE;
  B->ops->multtranspose    = MatMultTranspose_HYPRE;
  B->ops->multadd          = MatMultAdd_HYPRE;
  B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
  B->ops->setup            = MatSetUp_HYPRE;
  B->ops->destroy          = MatDestroy_HYPRE;
  B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
  B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues        = MatSetValues_HYPRE;
  B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
  B->ops->scale            = MatScale_HYPRE;
  B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries      = MatZeroEntries_HYPRE;
  B->ops->zerorows         = MatZeroRows_HYPRE;
  B->ops->getrow           = MatGetRow_HYPRE;
  B->ops->restorerow       = MatRestoreRow_HYPRE;
  B->ops->getvalues        = MatGetValues_HYPRE;
  B->ops->setoption        = MatSetOption_HYPRE;
  B->ops->duplicate        = MatDuplicate_HYPRE;
  B->ops->copy             = MatCopy_HYPRE;
  B->ops->view             = MatView_HYPRE;
  B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
  B->ops->axpy             = MatAXPY_HYPRE;

  /* build cache for off array entries formed (block size 1 stash) */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* private communicator handed to hypre, duplicated from the Mat's */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2075 
2076 static PetscErrorCode hypre_array_destroy(void *ptr)
2077 {
2078    PetscFunctionBegin;
2079    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2080    PetscFunctionReturn(0);
2081 }
2082