xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 7ea3e4caddcdc8968ce8b58701132bea2835aba8)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 /*MC
7    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
8           based on the hypre IJ interface.
9 
10    Level: intermediate
11 
12 .seealso: MatCreate()
13 M*/
14 
15 #include <petscmathypre.h>
16 #include <petsc/private/matimpl.h>
17 #include <../src/mat/impls/hypre/mhypre.h>
18 #include <../src/mat/impls/aij/mpi/mpiaij.h>
19 #include <../src/vec/vec/impls/hypre/vhyp.h>
20 #include <HYPRE.h>
21 #include <HYPRE_utilities.h>
22 #include <_hypre_parcsr_ls.h>
23 
24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
29 static PetscErrorCode hypre_array_destroy(void*);
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "MatHYPRE_IJMatrixPreallocate"
33 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
34 {
35   PetscErrorCode ierr;
36   PetscInt       i,n_d,n_o;
37   const PetscInt *ia_d,*ia_o;
38   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
39   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
40 
41   PetscFunctionBegin;
42   if (A_d) { /* determine number of nonzero entries in local diagonal part */
43     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
44     if (done_d) {
45       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
46       for (i=0; i<n_d; i++) {
47         nnz_d[i] = ia_d[i+1] - ia_d[i];
48       }
49     }
50     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
51   }
52   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
53     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
54     if (done_o) {
55       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
56       for (i=0; i<n_o; i++) {
57         nnz_o[i] = ia_o[i+1] - ia_o[i];
58       }
59     }
60     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
61   }
62   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
63     if (!done_o) { /* only diagonal part */
64       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
65       for (i=0; i<n_d; i++) {
66         nnz_o[i] = 0;
67       }
68     }
69     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
70     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
71     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
72   }
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNCT__
77 #define __FUNCT__ "MatHYPRE_CreateFromMat"
78 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
79 {
80   PetscErrorCode ierr;
81   PetscInt       rstart,rend,cstart,cend;
82 
83   PetscFunctionBegin;
84   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
85   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
86   rstart = A->rmap->rstart;
87   rend   = A->rmap->rend;
88   cstart = A->cmap->rstart;
89   cend   = A->cmap->rend;
90   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
91   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
92   {
93     PetscBool      same;
94     Mat            A_d,A_o;
95     const PetscInt *colmap;
96     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
97     if (same) {
98       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
99       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
100       PetscFunctionReturn(0);
101     }
102     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
103     if (same) {
104       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
105       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
106       PetscFunctionReturn(0);
107     }
108     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
109     if (same) {
110       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
111       PetscFunctionReturn(0);
112     }
113     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
114     if (same) {
115       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
116       PetscFunctionReturn(0);
117     }
118   }
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "MatHYPRE_IJMatrixCopy"
124 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
125 {
126   PetscErrorCode    ierr;
127   PetscInt          i,rstart,rend,ncols,nr,nc;
128   const PetscScalar *values;
129   const PetscInt    *cols;
130   PetscBool         flg;
131 
132   PetscFunctionBegin;
133   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
134   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
135   if (flg && nr == nc) {
136     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
137     PetscFunctionReturn(0);
138   }
139   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
140   if (flg) {
141     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
142     PetscFunctionReturn(0);
143   }
144 
145   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
146   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
147   for (i=rstart; i<rend; i++) {
148     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
149     PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
150     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
151   }
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNCT__
156 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ"
157 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
158 {
159   PetscErrorCode        ierr;
160   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
161   HYPRE_Int             type;
162   hypre_ParCSRMatrix    *par_matrix;
163   hypre_AuxParCSRMatrix *aux_matrix;
164   hypre_CSRMatrix       *hdiag;
165 
166   PetscFunctionBegin;
167   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
168   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
169   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
170   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
171   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
172   /*
173        this is the Hack part where we monkey directly with the hypre datastructures
174   */
175   ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
176   ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
177   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
178 
179   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
180   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
181   PetscFunctionReturn(0);
182 }
183 
184 #undef __FUNCT__
185 #define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ"
186 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
187 {
188   PetscErrorCode        ierr;
189   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
190   Mat_SeqAIJ            *pdiag,*poffd;
191   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
192   HYPRE_Int             type;
193   hypre_ParCSRMatrix    *par_matrix;
194   hypre_AuxParCSRMatrix *aux_matrix;
195   hypre_CSRMatrix       *hdiag,*hoffd;
196 
197   PetscFunctionBegin;
198   pdiag = (Mat_SeqAIJ*) pA->A->data;
199   poffd = (Mat_SeqAIJ*) pA->B->data;
200   /* cstart is only valid for square MPIAIJ layed out in the usual way */
201   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
202 
203   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
204   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
205   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
206   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
207   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
208   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
209 
210   /*
211        this is the Hack part where we monkey directly with the hypre datastructures
212   */
213   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
214   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
215   jj  = (PetscInt*)hdiag->j;
216   pjj = (PetscInt*)pdiag->j;
217   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
218   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
219   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
220   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
221      If we hacked a hypre a bit more we might be able to avoid this step */
222   jj  = (PetscInt*) hoffd->j;
223   pjj = (PetscInt*) poffd->j;
224   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
225   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
226 
227   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
228   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
229   PetscFunctionReturn(0);
230 }
231 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_IS"
/*
   Converts a MATHYPRE matrix A to MATIS format in *B. On each process the
   local MATIS matrix is the merge of the diagonal (dr x dc) and off-diagonal
   (dr x oc) CSR blocks of the hypre ParCSR matrix: off-diagonal columns are
   placed after the dc diagonal columns in the local column numbering, and
   the local-to-global column map is built from the diagonal column range
   [stc,stc+dc) followed by hypre's col_map_offd. Supports MAT_INITIAL_MATRIX,
   MAT_REUSE_MATRIX (sizes/nonzeros of *B must match) and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* access the ParCSR internals: global sizes, first locally owned row/column,
     and the i/j/data arrays of the diagonal and off-diagonal CSR blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);   /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns 0..dc-1 are the diagonal range, dc..dc+oc-1 the offd columns */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write into the CSR arrays of the existing local matrix of *B */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk the diag and offd CSR in lockstep, row by row;
     offd column indices are shifted by dc into the merged local numbering,
     and each merged row is sorted by column index */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* any trailing empty rows get the final row pointer nnz */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the arrays allocated above to lA */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    /* in-place conversion: replace A's internals with those of *B */
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
339 
340 #undef __FUNCT__
341 #define __FUNCT__ "MatConvert_AIJ_HYPRE"
342 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
343 {
344   Mat_HYPRE      *hB;
345   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
346   PetscErrorCode ierr;
347 
348   PetscFunctionBegin;
349   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
350   if (reuse == MAT_REUSE_MATRIX) {
351     /* always destroy the old matrix and create a new memory;
352        hope this does not churn the memory too much. The problem
353        is I do not know if it is possible to put the matrix back to
354        its initial state so that we can directly copy the values
355        the second time through. */
356     hB = (Mat_HYPRE*)((*B)->data);
357     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
358   } else {
359     ierr = MatCreate(comm,B);CHKERRQ(ierr);
360     ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
361     ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
362     ierr = MatSetUp(*B);CHKERRQ(ierr);
363     hB   = (Mat_HYPRE*)((*B)->data);
364   }
365   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
366   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
367   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
368   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
369   PetscFunctionReturn(0);
370 }
371 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_AIJ"
/*
   Converts a MATHYPRE matrix A to MATSEQAIJ (one process) or MATMPIAIJ.
   - MAT_INITIAL_MATRIX: new CSR arrays are allocated and handed to the AIJ matrix.
   - MAT_REUSE_MATRIX: the CSR arrays of the existing *B (which must be
     Seq/MPIAIJ with matching sizes and enough nonzeros) are overwritten.
   - MAT_INPLACE_MATRIX: the hypre CSR arrays themselves are reused; ownership
     is transferred to containers composed on *B so hypre_TFree releases them.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* access the diagonal and off-diagonal CSR blocks of the ParCSR matrix */
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain dii/djj/da for the diagonal part: fresh allocation, arrays of the
     reused matrix, or (in-place) the hypre arrays themselves */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  /* in the in-place case these copies are self-copies (same pointers);
     PetscMemcpy allows identical source/destination */
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* hypre does not guarantee sorted rows; PETSc AIJ expects columns sorted
     within each row */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* map hypre's compressed offd column indices back to global indices;
       in the in-place case ojj aliases offdj, which is safe element-wise */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the freshly allocated arrays */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the arrays from hypre so they are not freed twice */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the freshly allocated arrays */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      /* detach the arrays from hypre so they are not freed twice */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
542 
543 #undef __FUNCT__
544 #define __FUNCT__ "MatPtAP_AIJ_HYPRE"
545 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
546 {
547   Mat_HYPRE          *hP = (Mat_HYPRE*)P->data;
548   hypre_ParCSRMatrix *tA,*Pparcsr,*ptapparcsr;
549   hypre_CSRMatrix    *hdiag,*hoffd;
550   Mat_SeqAIJ         *diag,*offd;
551   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
552   HYPRE_Int          type,P_owns_col_starts;
553   PetscBool          ismpiaij,isseqaij;
554   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
555   char               mtype[256];
556   PetscErrorCode     ierr;
557 
558   PetscFunctionBegin;
559   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
560   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
561   if (scall == MAT_REUSE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_REUSE_MATRIX");
562   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
563   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
564   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
565 
566   /* It looks like we don't need to have the diagonal entries
567      ordered first in the rows of the diagonal part
568      for boomerAMGBuildCoarseOperator to work */
569   if (ismpiaij) {
570     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
571 
572     diag   = (Mat_SeqAIJ*)a->A->data;
573     offd   = (Mat_SeqAIJ*)a->B->data;
574     garray = a->garray;
575     noffd  = a->B->cmap->N;
576     dnnz   = diag->nz;
577     onnz   = offd->nz;
578   } else {
579     diag    = (Mat_SeqAIJ*)A->data;
580     offd    = NULL;
581     garray  = NULL;
582     noffd   = 0;
583     dnnz    = diag->nz;
584     onnz    = 0;
585   }
586 
587   /* create a temporary ParCSR */
588   if (HYPRE_AssumedPartitionCheck()) {
589    PetscMPIInt myid;
590 
591    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
592    row_starts = A->rmap->range + myid;
593    col_starts = A->cmap->range + myid;
594   } else {
595    row_starts = A->rmap->range;
596    col_starts = A->cmap->range;
597   }
598   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
599   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
600   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
601 
602   /* set diagonal part */
603   hdiag = hypre_ParCSRMatrixDiag(tA);
604   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
605   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
606   hypre_CSRMatrixData(hdiag)        = diag->a;
607   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
608   hypre_CSRMatrixSetRownnz(hdiag);
609   hypre_CSRMatrixSetDataOwner(hdiag,0);
610 
611   /* set offdiagonal part */
612   hoffd = hypre_ParCSRMatrixOffd(tA);
613   if (offd) {
614     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
615     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
616     hypre_CSRMatrixData(hoffd)        = offd->a;
617     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
618     hypre_CSRMatrixSetRownnz(hoffd);
619     hypre_CSRMatrixSetDataOwner(hoffd,0);
620     hypre_ParCSRMatrixSetNumNonzeros(tA);
621     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
622   }
623 
624   /* call RAP from BoomerAMG */
625   /* Warning: hypre_BoomerAMGBuildCoarseOperator steals the col_starts
626      from Pparcsr (even if it does not own them)! */
627   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
628   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(Pparcsr);
629   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
630   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(Pparcsr,tA,Pparcsr,&ptapparcsr));
631   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
632   hypre_ParCSRMatrixSetRowStartsOwner(ptapparcsr,0);
633   hypre_ParCSRMatrixSetColStartsOwner(ptapparcsr,0);
634   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(Pparcsr, 1);
635 
636   /* set pointers to NULL before destroying tA */
637   hypre_CSRMatrixI(hdiag)          = NULL;
638   hypre_CSRMatrixJ(hdiag)          = NULL;
639   hypre_CSRMatrixData(hdiag)       = NULL;
640   hypre_CSRMatrixI(hoffd)          = NULL;
641   hypre_CSRMatrixJ(hoffd)          = NULL;
642   hypre_CSRMatrixData(hoffd)       = NULL;
643   hypre_ParCSRMatrixColMapOffd(tA) = NULL;
644   hypre_ParCSRMatrixDestroy(tA);
645 
646   /* create C depending on mtype */
647   sprintf(mtype,MATAIJ);
648   ierr = PetscOptionsGetString(((PetscObject)A)->options,((PetscObject)A)->prefix,"-matptap_hypre_outtype",mtype,256,NULL);CHKERRQ(ierr);
649   ierr = MatCreateFromParCSR(ptapparcsr,mtype,PETSC_OWN_POINTER,C);CHKERRQ(ierr);
650   PetscFunctionReturn(0);
651 }
652 
#undef __FUNCT__
#define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
/*
   Computes C = P^T * A * P where both A and P are MATHYPRE, by calling
   BoomerAMG's RAP kernel directly on the underlying ParCSR matrices.
   MAT_REUSE_MATRIX is not supported. The result C is MATHYPRE and owns
   the ParCSR produced by hypre.
*/
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data, *hP = (Mat_HYPRE*)P->data;
  HYPRE_Int          type,P_owns_col_starts;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported MAT_REUSE_MATRIX");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* call RAP from BoomerAMG */
  /* Warning: hypre_BoomerAMGBuildCoarseOperator steals the col_starts
     from Pparcsr (even if it does not own them)!
     Save P's ownership flag beforehand and restore it afterwards. */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(Pparcsr);
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr));
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  hypre_ParCSRMatrixSetRowStartsOwner(ptapparcsr,0);
  hypre_ParCSRMatrixSetColStartsOwner(ptapparcsr,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(Pparcsr, 1);

  /* create MatHYPRE */
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
/* y = A^T x, delegating to the shared kernel with trans = PETSC_TRUE */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
697 
#undef __FUNCT__
#define __FUNCT__ "MatMult_HYPRE"
/* y = A x, delegating to the shared kernel with trans = PETSC_FALSE */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
708 
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_MultKernel_Private"
/*
   Shared kernel for MatMult (trans = PETSC_FALSE, y = A x) and
   MatMultTranspose (trans = PETSC_TRUE, y = A^T x).
   The cached hypre work vectors hA->x and hA->b are made to point at the
   PETSc Vec arrays for the duration of the matvec: the ReplacePointer macro
   swaps the data pointer in and saves the old one (sax/say), and the second
   pair of swaps restores the original pointers afterwards, so no data is
   copied. In the transpose branch the roles are exchanged: hA->x receives
   the output array ay and hA->b the input array ax.
*/
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* hy holds the input array, hx the output array: hx = A^T hy */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    /* hx holds the input array, hy the output array: hy = A hx */
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
742 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_HYPRE"
/*
   Destroys a MATHYPRE matrix: releases the hypre work vectors and IJ matrix,
   frees the duplicated communicator, and clears every composed method that
   MatCreate_HYPRE registered.
*/
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* when the inner ParCSR data is not owned (see MatCreateFromParCSR with
       PETSC_USE_POINTER), detach it so HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
767 
768 #undef __FUNCT__
769 #define __FUNCT__ "MatSetUp_HYPRE"
770 static PetscErrorCode MatSetUp_HYPRE(Mat A)
771 {
772   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
773   Vec                x,b;
774   PetscErrorCode     ierr;
775 
776   PetscFunctionBegin;
777   if (!A->preallocated) {
778     ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
779   }
780   if (hA->x) PetscFunctionReturn(0);
781   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
782   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
783   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
784   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
785   ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
786   ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
787   ierr = VecDestroy(&x);CHKERRQ(ierr);
788   ierr = VecDestroy(&b);CHKERRQ(ierr);
789   PetscFunctionReturn(0);
790 }
791 
792 #undef __FUNCT__
793 #define __FUNCT__ "MatAssemblyEnd_HYPRE"
794 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
795 {
796   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
797   PetscErrorCode     ierr;
798 
799   PetscFunctionBegin;
800   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
801   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
802   PetscFunctionReturn(0);
803 }
804 
805 #define MATHYPRE_SCRATCH 2048
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "MatSetValues_HYPRE"
809 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
810 {
811   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
812   PetscScalar        *vals = (PetscScalar *)v;
813   PetscScalar        sscr[MATHYPRE_SCRATCH];
814   HYPRE_Int          cscr[2][MATHYPRE_SCRATCH];
815   HYPRE_Int          i,nzc;
816   PetscErrorCode     ierr;
817 
818   PetscFunctionBegin;
819   for (i=0,nzc=0;i<nc;i++) {
820     if (cols[i] >= 0) {
821       cscr[0][nzc  ] = cols[i];
822       cscr[1][nzc++] = i;
823     }
824   }
825   if (!nzc) PetscFunctionReturn(0);
826 
827   if (ins == ADD_VALUES) {
828     for (i=0;i<nr;i++) {
829       if (rows[i] >= 0) {
830         PetscInt j;
831         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
832         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
833       }
834       vals += nc;
835     }
836   } else { /* INSERT_VALUES */
837 #if defined(PETSC_USE_DEBUG)
838     /* Insert values cannot be used to insert offproc entries */
839     PetscInt rst,ren;
840     ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
841     for (i=0;i<nr;i++)
842       if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead");
843 #endif
844     for (i=0;i<nr;i++) {
845       if (rows[i] >= 0) {
846         PetscInt j;
847         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
848         PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
849       }
850       vals += nc;
851     }
852   }
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "MatHYPRESetPreallocation_HYPRE"
858 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
859 {
860   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
861   HYPRE_Int          *hdnnz,*honnz;
862   PetscInt           i,rs,re,cs,ce,bs;
863   PetscMPIInt        size;
864   PetscErrorCode     ierr;
865 
866   PetscFunctionBegin;
867   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
868   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
869   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
870   rs   = A->rmap->rstart;
871   re   = A->rmap->rend;
872   cs   = A->cmap->rstart;
873   ce   = A->cmap->rend;
874   if (!hA->ij) {
875     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
876     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
877   } else {
878     HYPRE_Int hrs,hre,hcs,hce;
879     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
880     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
881     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
882   }
883   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
884 
885   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
886   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
887 
888   if (!dnnz) {
889     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
890     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
891   } else {
892     hdnnz = (HYPRE_Int*)dnnz;
893   }
894   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
895   if (size > 1) {
896     if (!onnz) {
897       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
898       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
899     } else {
900       honnz = (HYPRE_Int*)onnz;
901     }
902     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
903   } else {
904     honnz = NULL;
905     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
906   }
907   if (!dnnz) {
908     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
909   }
910   if (!onnz && honnz) {
911     ierr = PetscFree(honnz);CHKERRQ(ierr);
912   }
913   A->preallocated = PETSC_TRUE;
914 
915   /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
916   {
917     hypre_AuxParCSRMatrix *aux_matrix;
918     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
919     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
920   }
921 
922   ierr = MatSetUp(A);CHKERRQ(ierr);
923   PetscFunctionReturn(0);
924 }
925 
926 /*@C
927    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
928 
929    Collective on Mat
930 
931    Input Parameters:
932 +  A - the matrix
933 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
934           (same value is used for all local rows)
935 .  dnnz - array containing the number of nonzeros in the various rows of the
936           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
938           The size of this array is equal to the number of local rows, i.e 'm'.
939           For matrices that will be factored, you must leave room for (and set)
940           the diagonal entry even if it is zero.
941 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
942           submatrix (same value is used for all local rows).
943 -  onnz - array containing the number of nonzeros in the various rows of the
944           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
946           structure. The size of this array is equal to the number
947           of local rows, i.e 'm'.
948 
949    Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
950 
951    Level: intermediate
952 
953 .keywords: matrix, aij, compressed row, sparse, parallel
954 
955 .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE
956 @*/
#undef __FUNCT__
#define __FUNCT__ "MatHYPRESetPreallocation"
/* Public entry point: dispatches to MatHYPRESetPreallocation_HYPRE via the composed
   "MatHYPRESetPreallocation_C" method; a no-op for matrix types that do not compose it */
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
969 
970 /*
971    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
972 
973    Collective
974 
975    Input Parameters:
976 +  vparcsr  - the pointer to the hypre_ParCSRMatrix
977 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
978 -  copymode - PETSc copying options
979 
980    Output Parameter:
981 .  A  - the matrix
982 
983    Level: intermediate
984 
.seealso: MATHYPRE, PetscCopyMode
986 */
#undef __FUNCT__
#define __FUNCT__ "MatCreateFromParCSR"
/*
   Creates a PETSc Mat (MATAIJ, MATIS or MATHYPRE) wrapping an existing
   hypre_ParCSRMatrix. copymode controls ownership of the hypre data:
   PETSC_OWN_POINTER transfers ownership to the new matrix, PETSC_USE_POINTER
   leaves it with the caller; PETSC_COPY_VALUES is rejected for MATHYPRE.
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  hypre_ParCSRMatrix    *parcsr;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  ierr = MatSetUp(T);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object: the IJ matrix is made to point directly at the caller's data */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "MatHYPREGetParCSR_HYPRE"
1062 PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1063 {
1064   Mat_HYPRE*            hA = (Mat_HYPRE*)A->data;
1065   HYPRE_Int             type;
1066   PetscErrorCode        ierr;
1067 
1068   PetscFunctionBegin;
1069   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1070   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1071   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1072   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1073   PetscFunctionReturn(0);
1074 }
1075 
1076 /*
1077    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1078 
1079    Not collective
1080 
1081    Input Parameters:
.  A  - the MATHYPRE object
1083 
1084    Output Parameter:
1085 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1086 
1087    Level: intermediate
1088 
.seealso: MATHYPRE, PetscCopyMode
1090 */
#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR"
/* Public entry point: dispatches to MatHYPREGetParCSR_HYPRE via the composed
   "MatHYPREGetParCSR_C" method; errors if the matrix type does not compose it */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1103 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_HYPRE"
/*
   Constructor for the MATHYPRE matrix type: allocates the Mat_HYPRE context,
   installs the operation table, duplicates the communicator for hypre's use,
   and composes the type-specific methods (cleared again in MatDestroy_HYPRE).
*/
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;   /* by default the matrix owns its hypre data */

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  B->ops->mult          = MatMult_HYPRE;
  B->ops->multtranspose = MatMultTranspose_HYPRE;
  B->ops->setup         = MatSetUp_HYPRE;
  B->ops->destroy       = MatDestroy_HYPRE;
  B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
  B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
  B->ops->setvalues     = MatSetValues_HYPRE;

  /* hypre needs its own communicator, independent of PETSc's */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  /* the same PtAP routine handles both the seqaij and mpiaij combinations */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1137 
1138 #undef __FUNCT__
1139 #define __FUNCT__ "hypre_array_destroy"
1140 static PetscErrorCode hypre_array_destroy(void *ptr)
1141 {
1142    PetscFunctionBegin;
1143    hypre_TFree(ptr);
1144    PetscFunctionReturn(0);
1145 }
1146 
#if 0
/*
    Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

    This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
    which will corrupt the PETSc data structure if we did this. Need a work around to this problem.
*/
/* NOTE: this whole region is compiled out (#if 0); kept only as a record of the attempted approach */
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixLink"
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  /* disable hypre's auxiliary matrix so the PETSc arrays would be used directly */
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
1194