xref: /petsc/src/mat/impls/hypre/mhypre.c (revision ddbeb582aecdc2b45035d3207f6e8998d8902f5d)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscmathypre.h>
7 #include <petsc/private/matimpl.h>
8 #include <../src/mat/impls/hypre/mhypre.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <../src/vec/vec/impls/hypre/vhyp.h>
11 #include <HYPRE.h>
12 #include <HYPRE_utilities.h>
13 #include <_hypre_parcsr_ls.h>
14 #include <_hypre_sstruct_ls.h>
15 
16 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
17 
18 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
19 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
20 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
21 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
22 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
23 static PetscErrorCode hypre_array_destroy(void*);
24 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
25 
26 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
27 {
28   PetscErrorCode ierr;
29   PetscInt       i,n_d,n_o;
30   const PetscInt *ia_d,*ia_o;
31   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
32   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
33 
34   PetscFunctionBegin;
35   if (A_d) { /* determine number of nonzero entries in local diagonal part */
36     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
37     if (done_d) {
38       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
39       for (i=0; i<n_d; i++) {
40         nnz_d[i] = ia_d[i+1] - ia_d[i];
41       }
42     }
43     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
44   }
45   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
46     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
47     if (done_o) {
48       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
49       for (i=0; i<n_o; i++) {
50         nnz_o[i] = ia_o[i+1] - ia_o[i];
51       }
52     }
53     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
54   }
55   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
56     if (!done_o) { /* only diagonal part */
57       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
58       for (i=0; i<n_d; i++) {
59         nnz_o[i] = 0;
60       }
61     }
62     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
63     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
64     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
65   }
66   PetscFunctionReturn(0);
67 }
68 
69 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
70 {
71   PetscErrorCode ierr;
72   PetscInt       rstart,rend,cstart,cend;
73 
74   PetscFunctionBegin;
75   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
76   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
77   rstart = A->rmap->rstart;
78   rend   = A->rmap->rend;
79   cstart = A->cmap->rstart;
80   cend   = A->cmap->rend;
81   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
82   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
83   {
84     PetscBool      same;
85     Mat            A_d,A_o;
86     const PetscInt *colmap;
87     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
88     if (same) {
89       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
90       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
91       PetscFunctionReturn(0);
92     }
93     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
94     if (same) {
95       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
96       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
97       PetscFunctionReturn(0);
98     }
99     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
100     if (same) {
101       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
102       PetscFunctionReturn(0);
103     }
104     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
105     if (same) {
106       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
107       PetscFunctionReturn(0);
108     }
109   }
110   PetscFunctionReturn(0);
111 }
112 
113 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
114 {
115   PetscErrorCode    ierr;
116   PetscInt          i,rstart,rend,ncols,nr,nc;
117   const PetscScalar *values;
118   const PetscInt    *cols;
119   PetscBool         flg;
120 
121   PetscFunctionBegin;
122   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
123   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
124   if (flg && nr == nc) {
125     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
126     PetscFunctionReturn(0);
127   }
128   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
129   if (flg) {
130     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
131     PetscFunctionReturn(0);
132   }
133 
134   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
135   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
136   for (i=rstart; i<rend; i++) {
137     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
138     if (ncols) {
139       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
140     }
141     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
142   }
143   PetscFunctionReturn(0);
144 }
145 
/*
   Fast copy of a SeqAIJ matrix into an already preallocated HYPRE IJ matrix:
   copies the CSR arrays directly into hypre's diagonal CSR block instead of
   inserting values row by row through the IJ interface.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  /* NOTE(review): these memcpys assume sizeof(HYPRE_Int)==sizeof(PetscInt) and
     sizeof(HYPRE_Complex)==sizeof(PetscScalar) -- confirm the build enforces this */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  /* values are fully in place: tell hypre not to build the auxiliary assembly structure */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
172 
173 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
174 {
175   PetscErrorCode        ierr;
176   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
177   Mat_SeqAIJ            *pdiag,*poffd;
178   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
179   HYPRE_Int             type;
180   hypre_ParCSRMatrix    *par_matrix;
181   hypre_AuxParCSRMatrix *aux_matrix;
182   hypre_CSRMatrix       *hdiag,*hoffd;
183 
184   PetscFunctionBegin;
185   pdiag = (Mat_SeqAIJ*) pA->A->data;
186   poffd = (Mat_SeqAIJ*) pA->B->data;
187   /* cstart is only valid for square MPIAIJ layed out in the usual way */
188   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
189 
190   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
191   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
192   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
193   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
194   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
195   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
196 
197   /*
198        this is the Hack part where we monkey directly with the hypre datastructures
199   */
200   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
201   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
202   jj  = (PetscInt*)hdiag->j;
203   pjj = (PetscInt*)pdiag->j;
204   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
205   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
206   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
207   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
208      If we hacked a hypre a bit more we might be able to avoid this step */
209   jj  = (PetscInt*) hoffd->j;
210   pjj = (PetscInt*) poffd->j;
211   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
212   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
213 
214   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
215   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
216   PetscFunctionReturn(0);
217 }
218 
/*
   Converts a MATHYPRE matrix (wrapping a ParCSR) to MATIS: each process
   merges the diagonal and off-diagonal CSR blocks of the ParCSR into one
   local SeqAIJ matrix. With MAT_REUSE_MATRIX, *B must be a MATIS produced by
   a previous call with a compatible nonzero pattern.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;            /* local (per-process) matrix of the MATIS */
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* global sizes, first local row/column, and the two local CSR blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns: first the dc owned columns, then the oc ghost columns */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse the CSR arrays of the existing local matrix after sanity checks */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: for each row, emit the diag entries (local column
     ids unchanged) then the offd entries (shifted by dc into the ghost
     range), and sort the row's column indices with matching values */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad the row pointers for trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
324 
325 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
326 {
327   Mat_HYPRE      *hB;
328   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
329   PetscErrorCode ierr;
330 
331   PetscFunctionBegin;
332   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
333   if (reuse == MAT_REUSE_MATRIX) {
334     /* always destroy the old matrix and create a new memory;
335        hope this does not churn the memory too much. The problem
336        is I do not know if it is possible to put the matrix back to
337        its initial state so that we can directly copy the values
338        the second time through. */
339     hB = (Mat_HYPRE*)((*B)->data);
340     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
341   } else {
342     ierr = MatCreate(comm,B);CHKERRQ(ierr);
343     ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
344     ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
345     hB   = (Mat_HYPRE*)((*B)->data);
346   }
347   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
348   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
349   (*B)->preallocated = PETSC_TRUE;
350   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
351   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
352   PetscFunctionReturn(0);
353 }
354 
/*
   Converts a MATHYPRE matrix (wrapping a ParCSR) to MATAIJ (SeqAIJ on one
   process, MPIAIJ otherwise). For MAT_INPLACE_MATRIX the hypre CSR arrays
   are stolen: they are detached from the hypre matrix and their eventual
   destruction is delegated to PetscContainers attached to the result.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the diagonal-part CSR arrays: freshly allocated, reused from the
     existing AIJ matrix, or (in-place) the hypre arrays themselves */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  /* in the in-place case these are self-copies over the hypre arrays */
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort each row's column indices (with matching values) */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    /* NOTE(review): uses m+1 here -- assumes hoffd has the same number of
       local rows as hdiag (confirm against the hypre ParCSR layout) */
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    /* map hypre's compressed offd column ids to global column ids */
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the CSR arrays to the matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the stolen arrays so hypre does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the CSR arrays to the matrix */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      /* detach the stolen arrays so hypre does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach each stolen array to the result in a container whose destroy
       callback (hypre_array_destroy) releases it with hypre's allocator */
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
523 
524 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
525 {
526   hypre_ParCSRMatrix *tA;
527   hypre_CSRMatrix    *hdiag,*hoffd;
528   Mat_SeqAIJ         *diag,*offd;
529   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
530   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
531   PetscBool          ismpiaij,isseqaij;
532   PetscErrorCode     ierr;
533 
534   PetscFunctionBegin;
535   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
536   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
537   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
538   if (ismpiaij) {
539     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
540 
541     diag   = (Mat_SeqAIJ*)a->A->data;
542     offd   = (Mat_SeqAIJ*)a->B->data;
543     garray = a->garray;
544     noffd  = a->B->cmap->N;
545     dnnz   = diag->nz;
546     onnz   = offd->nz;
547   } else {
548     diag   = (Mat_SeqAIJ*)A->data;
549     offd   = NULL;
550     garray = NULL;
551     noffd  = 0;
552     dnnz   = diag->nz;
553     onnz   = 0;
554   }
555 
556   /* create a temporary ParCSR */
557   if (HYPRE_AssumedPartitionCheck()) {
558     PetscMPIInt myid;
559 
560     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
561     row_starts = A->rmap->range + myid;
562     col_starts = A->cmap->range + myid;
563   } else {
564     row_starts = A->rmap->range;
565     col_starts = A->cmap->range;
566   }
567   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
568   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
569   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
570 
571   /* set diagonal part */
572   hdiag = hypre_ParCSRMatrixDiag(tA);
573   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
574   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
575   hypre_CSRMatrixData(hdiag)        = diag->a;
576   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
577   hypre_CSRMatrixSetRownnz(hdiag);
578   hypre_CSRMatrixSetDataOwner(hdiag,0);
579 
580   /* set offdiagonal part */
581   hoffd = hypre_ParCSRMatrixOffd(tA);
582   if (offd) {
583     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
584     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
585     hypre_CSRMatrixData(hoffd)        = offd->a;
586     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
587     hypre_CSRMatrixSetRownnz(hoffd);
588     hypre_CSRMatrixSetDataOwner(hoffd,0);
589     hypre_ParCSRMatrixSetNumNonzeros(tA);
590     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
591   }
592   *hA = tA;
593   PetscFunctionReturn(0);
594 }
595 
596 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
597 {
598   hypre_CSRMatrix    *hdiag,*hoffd;
599 
600   PetscFunctionBegin;
601   hdiag = hypre_ParCSRMatrixDiag(*hA);
602   hoffd = hypre_ParCSRMatrixOffd(*hA);
603   /* set pointers to NULL before destroying tA */
604   hypre_CSRMatrixI(hdiag)           = NULL;
605   hypre_CSRMatrixJ(hdiag)           = NULL;
606   hypre_CSRMatrixData(hdiag)        = NULL;
607   hypre_CSRMatrixI(hoffd)           = NULL;
608   hypre_CSRMatrixJ(hoffd)           = NULL;
609   hypre_CSRMatrixData(hoffd)        = NULL;
610   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
611   hypre_ParCSRMatrixDestroy(*hA);
612   *hA = NULL;
613   PetscFunctionReturn(0);
614 }
615 
616 /* calls RAP from BoomerAMG:
617    the resulting ParCSR will not own the column and row starts
618    It looks like we don't need to have the diagonal entries
619    ordered first in the rows of the diagonal part
620    for boomerAMGBuildCoarseOperator to work */
621 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
622 {
623   HYPRE_Int P_owns_col_starts,R_owns_row_starts;
624 
625   PetscFunctionBegin;
626   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
627   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
628   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
629   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
630   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
631   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
632   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
633   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
634   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
635   PetscFunctionReturn(0);
636 }
637 
638 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
639 {
640   Mat                B;
641   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
642   PetscErrorCode     ierr;
643 
644   PetscFunctionBegin;
645   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
646   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
647   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
648   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
649   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
650   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
651   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
652   PetscFunctionReturn(0);
653 }
654 
655 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
656 {
657   PetscErrorCode ierr;
658 
659   PetscFunctionBegin;
660   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
661   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
662   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
663   PetscFunctionReturn(0);
664 }
665 
666 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
667 {
668   Mat                B;
669   Mat_HYPRE          *hP;
670   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
671   HYPRE_Int          type;
672   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
673   PetscBool          ishypre;
674   PetscErrorCode     ierr;
675 
676   PetscFunctionBegin;
677   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
678   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
679   hP = (Mat_HYPRE*)P->data;
680   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
681   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
682   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
683 
684   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
685   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
686   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
687 
688   /* create temporary matrix and merge to C */
689   ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
690   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
691   PetscFunctionReturn(0);
692 }
693 
694 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
695 {
696   PetscErrorCode ierr;
697 
698   PetscFunctionBegin;
699   if (scall == MAT_INITIAL_MATRIX) {
700     const char *deft = MATAIJ;
701     char       type[256];
702     PetscBool  flg;
703 
704     ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
705     ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
706     ierr = PetscOptionsEnd();CHKERRQ(ierr);
707     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
708     ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
709     if (flg) {
710       ierr = MatSetType(*C,type);CHKERRQ(ierr);
711     } else {
712       ierr = MatSetType(*C,deft);CHKERRQ(ierr);
713     }
714     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
715     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
716   }
717   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
718   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
719   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
720   PetscFunctionReturn(0);
721 }
722 
723 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
724 {
725   Mat                B;
726   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
727   Mat_HYPRE          *hA,*hP;
728   PetscBool          ishypre;
729   HYPRE_Int          type;
730   PetscErrorCode     ierr;
731 
732   PetscFunctionBegin;
733   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
734   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
735   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
736   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
737   hA = (Mat_HYPRE*)A->data;
738   hP = (Mat_HYPRE*)P->data;
739   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
740   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
741   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
742   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
743   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
744   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
745   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
746   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
747   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
748   PetscFunctionReturn(0);
749 }
750 
/* PtAP driver when both operands are MATHYPRE: the symbolic phase only
   creates an empty MATHYPRE C and installs the numeric op; the product is
   computed entirely in the numeric phase. */
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
768 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* thin wrapper: push/pop so the hypre call shows up in PETSc stack traces */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
783 
784 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
785 {
786   Mat                D;
787   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
788   PetscErrorCode     ierr;
789 
790   PetscFunctionBegin;
791   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
792   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
793   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
794   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
795   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
796   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
797   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
798   PetscFunctionReturn(0);
799 }
800 
/* Symbolic phase of C = A*B through hypre: no symbolic work is performed;
   just create an empty AIJ matrix and install the hypre-backed numeric op. */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}
811 
/* Numeric phase of C = A*B when both operands are MATHYPRE. */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE holding HYPRE_PARCSR objects */
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  /* the temporary D takes ownership of the hypre product */
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* reinstall the numeric op on C (NOTE(review): presumably clobbered by the
     header swap above — confirm against MatHeaderReplace semantics) */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}
841 
/* MatMult product driver for two MATHYPRE matrices: symbolic phase creates
   an empty MATHYPRE C and installs the numeric op; the numeric phase does
   the actual multiplication. */
static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
    ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
859 
860 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
861 {
862   Mat                E;
863   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
864   PetscErrorCode     ierr;
865 
866   PetscFunctionBegin;
867   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
868   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
869   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
870   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
871   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
872   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
873   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
874   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
875   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
876   PetscFunctionReturn(0);
877 }
878 
/* Symbolic phase of D = A^T * B * C: only creates an empty AIJ matrix.
   NOTE(review): unlike MatMatMultSymbolic_AIJ_AIJ_wHYPRE, no numeric op
   pointer is installed here — presumably the caller wires it up; confirm. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
888 
/* y = A^T x for MATHYPRE: delegates to the shared kernel with trans=TRUE. */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
897 
/* y = A x for MATHYPRE: delegates to the shared kernel with trans=FALSE. */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
906 
/* Shared kernel for MatMult/MatMultTranspose: temporarily swaps the PETSc
   vector arrays into the matrix' cached hypre vectors (hA->x, hA->b), calls
   the hypre matvec, then restores the original hypre data pointers. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;   /* sax/say hold the displaced hypre pointers */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  /* x is read-only; the cast is needed because the pointer-swap macro below
     operates on non-const arrays */
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose: hy (from hA->b) carries the input ax, hx receives the result into ay */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    /* forward: hx (from hA->x) carries the input ax, hy receives the result into ay */
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
938 
/* Destroy the MATHYPRE context: hypre vectors and IJ matrix, the duplicated
   communicator, the stash, the scratch buffer, and all composed methods. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if we do not own the inner ParCSR object, detach it first so
       IJMatrixDestroy does not free memory owned elsewhere */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  /* scratch buffer handed out by MatGetArray_HYPRE */
  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the composed methods installed when the matrix was created */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
967 
/* Default setup: preallocate the IJ matrix with default row lengths. */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
976 
977 
/* Complete assembly of a MATHYPRE matrix:
   - MAT_FLUSH_ASSEMBLY is rejected;
   - off-process entries stashed by MatSetValues_HYPRE are received and
     re-inserted locally, one contiguous row segment per call;
   - the hypre IJ matrix is assembled, and on the first assembly the helper
     hypre vectors used by the matvec kernels are created. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  Vec                x,b;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* drain the stash: each message is a batch of (row,col,val) triples */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* hA->x already set means the work vectors were created by a prior assembly */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* temporary array-less PETSc vectors are used only to size the hypre vectors */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1024 
1025 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1026 {
1027   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1028   PetscErrorCode     ierr;
1029 
1030   PetscFunctionBegin;
1031   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1032 
1033   if (hA->size >= size) *array = hA->array;
1034   else {
1035     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1036     hA->size = size;
1037     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1038     *array = hA->array;
1039   }
1040 
1041   hA->available = PETSC_FALSE;
1042   PetscFunctionReturn(0);
1043 }
1044 
/* Give back the scratch buffer obtained with MatGetArray_HYPRE(); the
   buffer itself is retained for reuse, only the in-use flag is cleared. */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  *array = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1054 
1055 
/* MatSetValues for MATHYPRE: drops negative (ignored) row/column indices,
   forwards local entries to the HYPRE IJ matrix, and stashes off-process
   entries for INSERT_VALUES. Scratch space comes from the matrix' private
   buffer via MatGetArray_HYPRE(). */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscScalar        *vals = (PetscScalar *)v;
  PetscScalar        *sscr;       /* per-row values compacted to the kept columns */
  PetscInt           *cscr[2];    /* [0]: kept column indices, [1]: their positions in cols[] */
  PetscInt           i,nzc;
  void               *array = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* one buffer holds both the integer scratch (2*nc) and the value scratch (nc*nr) */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
  /* filter out negative column indices (PETSc convention: ignore them) */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        /* compact this row's values to match the filtered columns */
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        /* the casts assume PetscInt and HYPRE_Int have the same size — TODO confirm for 64-bit index builds */
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */

    PetscInt rst,ren;
    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);

    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr, PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1112 
1113 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1114 {
1115   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
1116   HYPRE_Int      *hdnnz,*honnz;
1117   PetscInt       i,rs,re,cs,ce,bs;
1118   PetscMPIInt    size;
1119   PetscErrorCode ierr;
1120 
1121   PetscFunctionBegin;
1122   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1123   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1124   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1125   rs   = A->rmap->rstart;
1126   re   = A->rmap->rend;
1127   cs   = A->cmap->rstart;
1128   ce   = A->cmap->rend;
1129   if (!hA->ij) {
1130     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1131     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1132   } else {
1133     HYPRE_Int hrs,hre,hcs,hce;
1134     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1135     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1136     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1137   }
1138   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1139   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1140 
1141   if (!dnnz) {
1142     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1143     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1144   } else {
1145     hdnnz = (HYPRE_Int*)dnnz;
1146   }
1147   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1148   if (size > 1) {
1149     hypre_AuxParCSRMatrix *aux_matrix;
1150     if (!onnz) {
1151       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1152       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1153     } else {
1154       honnz = (HYPRE_Int*)onnz;
1155     }
1156     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1157        they assume the user will input the entire row values, properly sorted
1158        In PETSc, we don't make such an assumption, and we instead set this flag to 1
1159        Also, to avoid possible memory leaks, we destroy and recreate the translator
1160        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1161        the IJ matrix for us */
1162     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1163     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1164     hypre_IJMatrixTranslator(hA->ij) = NULL;
1165     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1166     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1167     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1168   } else {
1169     honnz = NULL;
1170     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1171   }
1172 
1173   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1174   if (!dnnz) {
1175     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1176   }
1177   if (!onnz && honnz) {
1178     ierr = PetscFree(honnz);CHKERRQ(ierr);
1179   }
1180   A->preallocated = PETSC_TRUE;
1181   PetscFunctionReturn(0);
1182 }
1183 
1184 /*@C
1185    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1186 
1187    Collective on Mat
1188 
1189    Input Parameters:
1190 +  A - the matrix
1191 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1192           (same value is used for all local rows)
1193 .  dnnz - array containing the number of nonzeros in the various rows of the
1194           DIAGONAL portion of the local submatrix (possibly different for each row)
1195           or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure.
1196           The size of this array is equal to the number of local rows, i.e 'm'.
1197           For matrices that will be factored, you must leave room for (and set)
1198           the diagonal entry even if it is zero.
1199 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1200           submatrix (same value is used for all local rows).
1201 -  onnz - array containing the number of nonzeros in the various rows of the
1202           OFF-DIAGONAL portion of the local submatrix (possibly different for
1203           each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero
1204           structure. The size of this array is equal to the number
1205           of local rows, i.e 'm'.
1206 
1207    Notes:
1208     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1209 
1210    Level: intermediate
1211 
1212 .keywords: matrix, aij, compressed row, sparse, parallel
1213 
.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1215 @*/
/* Public entry point (documented above): validates A and dispatches to the
   type-specific implementation composed as "MatHYPRESetPreallocation_C". */
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1226 
1227 /*
1228    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1229 
1230    Collective
1231 
1232    Input Parameters:
1233 +  vparcsr  - the pointer to the hypre_ParCSRMatrix
1234 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1235 -  copymode - PETSc copying options
1236 
1237    Output Parameter:
1238 .  A  - the matrix
1239 
1240    Level: intermediate
1241 
.seealso: MATHYPRE, PetscCopyMode
1243 */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  hypre_ParCSRMatrix    *parcsr;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  /* decode the requested output type */
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* a MATHYPRE result wraps parcsr directly (see the object assignment below),
     so PETSC_COPY_VALUES cannot be honored for that type */
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    /* point hypre at PETSc's layout ranges without transferring ownership */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1338 
/* Return the hypre_ParCSRMatrix wrapped by a MATHYPRE; errors if the IJ
   matrix is absent or not of HYPRE_PARCSR type. */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}
1351 
1352 /*
1353    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1354 
1355    Not collective
1356 
1357    Input Parameters:
.  A  - the MATHYPRE object
1359 
1360    Output Parameter:
1361 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1362 
1363    Level: intermediate
1364 
.seealso: MATHYPRE, MatCreateFromParCSR()
1366 */
/* Public entry point (documented above): validates A and dispatches to the
   type-specific implementation composed as "MatHYPREGetParCSR_C". */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1377 
1378 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1379 {
1380   hypre_ParCSRMatrix *parcsr;
1381   hypre_CSRMatrix    *ha;
1382   PetscInt           rst;
1383   PetscErrorCode     ierr;
1384 
1385   PetscFunctionBegin;
1386   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1387   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1388   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1389   if (missing) *missing = PETSC_FALSE;
1390   if (dd) *dd = -1;
1391   ha = hypre_ParCSRMatrixDiag(parcsr);
1392   if (ha) {
1393     PetscInt  size,i;
1394     HYPRE_Int *ii,*jj;
1395 
1396     size = hypre_CSRMatrixNumRows(ha);
1397     ii   = hypre_CSRMatrixI(ha);
1398     jj   = hypre_CSRMatrixJ(ha);
1399     for (i = 0; i < size; i++) {
1400       PetscInt  j;
1401       PetscBool found = PETSC_FALSE;
1402 
1403       for (j = ii[i]; j < ii[i+1] && !found; j++)
1404         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1405 
1406       if (!found) {
1407         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1408         if (missing) *missing = PETSC_TRUE;
1409         if (dd) *dd = i+rst;
1410         PetscFunctionReturn(0);
1411       }
1412     }
1413     if (!size) {
1414       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1415       if (missing) *missing = PETSC_TRUE;
1416       if (dd) *dd = rst;
1417     }
1418   } else {
1419     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1420     if (missing) *missing = PETSC_TRUE;
1421     if (dd) *dd = rst;
1422   }
1423   PetscFunctionReturn(0);
1424 }
1425 
1426 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1427 {
1428   hypre_ParCSRMatrix *parcsr;
1429   hypre_CSRMatrix    *ha;
1430   PetscErrorCode     ierr;
1431 
1432   PetscFunctionBegin;
1433   ierr  = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1434   /* diagonal part */
1435   ha = hypre_ParCSRMatrixDiag(parcsr);
1436   if (ha) {
1437     PetscInt    size,i;
1438     HYPRE_Int   *ii;
1439     PetscScalar *a;
1440 
1441     size = hypre_CSRMatrixNumRows(ha);
1442     a    = hypre_CSRMatrixData(ha);
1443     ii   = hypre_CSRMatrixI(ha);
1444     for (i = 0; i < ii[size]; i++) a[i] *= s;
1445   }
1446   /* offdiagonal part */
1447   ha = hypre_ParCSRMatrixOffd(parcsr);
1448   if (ha) {
1449     PetscInt    size,i;
1450     HYPRE_Int   *ii;
1451     PetscScalar *a;
1452 
1453     size = hypre_CSRMatrixNumRows(ha);
1454     a    = hypre_CSRMatrixData(ha);
1455     ii   = hypre_CSRMatrixI(ha);
1456     for (i = 0; i < ii[size]; i++) a[i] *= s;
1457   }
1458   PetscFunctionReturn(0);
1459 }
1460 
1461 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1462 {
1463   hypre_ParCSRMatrix *parcsr;
1464   HYPRE_Int          *lrows;
1465   PetscInt           rst,ren,i;
1466   PetscErrorCode     ierr;
1467 
1468   PetscFunctionBegin;
1469   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1470   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1471   ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
1472   ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1473   for (i=0;i<numRows;i++) {
1474     if (rows[i] < rst || rows[i] >= ren)
1475       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1476     lrows[i] = rows[i] - rst;
1477   }
1478   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1479   ierr = PetscFree(lrows);CHKERRQ(ierr);
1480   PetscFunctionReturn(0);
1481 }
1482 
1483 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1484 {
1485   PetscErrorCode      ierr;
1486 
1487   PetscFunctionBegin;
1488   if (ha) {
1489     HYPRE_Int     *ii, size;
1490     HYPRE_Complex *a;
1491 
1492     size = hypre_CSRMatrixNumRows(ha);
1493     a    = hypre_CSRMatrixData(ha);
1494     ii   = hypre_CSRMatrixI(ha);
1495 
1496     if (a) { ierr = PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex));CHKERRQ(ierr); }
1497   }
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1502 {
1503   hypre_ParCSRMatrix  *parcsr;
1504   PetscErrorCode      ierr;
1505 
1506   PetscFunctionBegin;
1507   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1508   /* diagonal part */
1509   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
1510   /* off-diagonal part */
1511   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
1512   PetscFunctionReturn(0);
1513 }
1514 
1515 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
1516 {
1517   PetscInt        ii, jj, ibeg, iend, irow;
1518   PetscInt        *i, *j;
1519   PetscScalar     *a;
1520 
1521   PetscFunctionBegin;
1522 
1523   if (!hA) PetscFunctionReturn(0);
1524 
1525   i = (PetscInt*) hypre_CSRMatrixI(hA);
1526   j = (PetscInt*) hypre_CSRMatrixJ(hA);
1527   a = hypre_CSRMatrixData(hA);
1528 
1529   for (ii = 0; ii < N; ii++) {
1530     irow = rows[ii];
1531     ibeg = i[irow];
1532     iend = i[irow+1];
1533     for (jj = ibeg; jj < iend; jj++)
1534       if (j[jj] == irow) a[jj] = diag;
1535       else a[jj] = 0.0;
1536    }
1537 
1538    PetscFunctionReturn(0);
1539 }
1540 
1541 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1542 {
1543   hypre_ParCSRMatrix  *parcsr;
1544   PetscInt            *lrows,len;
1545   PetscErrorCode      ierr;
1546 
1547   PetscFunctionBegin;
1548   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size \n");
1549   /* retrieve the internal matrix */
1550   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1551   /* get locally owned rows */
1552   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1553   /* zero diagonal part */
1554   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);CHKERRQ(ierr);
1555   /* zero off-diagonal part */
1556   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);
1557 
1558   ierr = PetscFree(lrows);CHKERRQ(ierr);
1559   PetscFunctionReturn(0);
1560 }
1561 
1562 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1563 {
1564   PetscErrorCode ierr;
1565 
1566   PetscFunctionBegin;
1567   if (mat->nooffprocentries) PetscFunctionReturn(0);
1568 
1569   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
1570   PetscFunctionReturn(0);
1571 }
1572 
1573 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1574 {
1575   hypre_ParCSRMatrix  *parcsr;
1576   PetscErrorCode      ierr;
1577 
1578   PetscFunctionBegin;
1579   /* retrieve the internal matrix */
1580   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1581   /* call HYPRE API */
1582   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
1583   PetscFunctionReturn(0);
1584 }
1585 
1586 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1587 {
1588   hypre_ParCSRMatrix  *parcsr;
1589   PetscErrorCode      ierr;
1590 
1591   PetscFunctionBegin;
1592   /* retrieve the internal matrix */
1593   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1594   /* call HYPRE API */
1595   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
1596   PetscFunctionReturn(0);
1597 }
1598 
1599 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1600 {
1601   HYPRE_IJMatrix *hIJ = (HYPRE_IJMatrix*)A->data;
1602   PetscInt        i;
1603 
1604   PetscFunctionBegin;
1605   if (!m || !n) PetscFunctionReturn(0);
1606   /* Ignore negative row indices
1607    * And negative column indices should be automatically ignored in hypre
1608    * */
1609   for (i=0; i<m; i++)
1610     if (idxm[i] >= 0) PetscStackCallStandard(HYPRE_IJMatrixGetValues,(*hIJ,1,(HYPRE_Int*)&n,(HYPRE_Int*)&idxm[i],(HYPRE_Int*)idxn,&v[i*n]));
1611   PetscFunctionReturn(0);
1612 }
1613 
1614 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1615 {
1616   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1617 
1618   PetscFunctionBegin;
1619   switch (op)
1620   {
1621   case MAT_NO_OFF_PROC_ENTRIES:
1622     if (flg) {
1623       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1624     }
1625     break;
1626   default:
1627     break;
1628   }
1629   PetscFunctionReturn(0);
1630 }
1631 
1632 /*MC
1633    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1634           based on the hypre IJ interface.
1635 
1636    Level: intermediate
1637 
1638 .seealso: MatCreate()
1639 M*/
1640 
1641 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
1642 {
1643   Mat_HYPRE      *hB;
1644   PetscErrorCode ierr;
1645 
1646   PetscFunctionBegin;
1647   ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
1648   hB->inner_free = PETSC_TRUE;
1649   hB->available  = PETSC_TRUE;
1650   hB->size       = 0;
1651   hB->array      = NULL;
1652 
1653   B->data       = (void*)hB;
1654   B->rmap->bs   = 1;
1655   B->assembled  = PETSC_FALSE;
1656 
1657   ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1658   B->ops->mult            = MatMult_HYPRE;
1659   B->ops->multtranspose   = MatMultTranspose_HYPRE;
1660   B->ops->setup           = MatSetUp_HYPRE;
1661   B->ops->destroy         = MatDestroy_HYPRE;
1662 
1663   /* build cache for off array entries formed */
1664   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
1665   B->ops->assemblyend     = MatAssemblyEnd_HYPRE;
1666   B->ops->assemblybegin   = MatAssemblyBegin_HYPRE;
1667 
1668   B->ops->ptap            = MatPtAP_HYPRE_HYPRE;
1669   B->ops->matmult         = MatMatMult_HYPRE_HYPRE;
1670   B->ops->setvalues       = MatSetValues_HYPRE;
1671   B->ops->missingdiagonal = MatMissingDiagonal_HYPRE;
1672   B->ops->scale           = MatScale_HYPRE;
1673   B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE;
1674   B->ops->zeroentries     = MatZeroEntries_HYPRE;
1675   B->ops->zerorows        = MatZeroRows_HYPRE;
1676   B->ops->getrow          = MatGetRow_HYPRE;
1677   B->ops->restorerow      = MatRestoreRow_HYPRE;
1678   B->ops->getvalues       = MatGetValues_HYPRE;
1679   B->ops->setoption       = MatSetOption_HYPRE;
1680 
1681   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
1682   ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
1683   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
1684   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
1685   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
1686   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
1687   ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
1688   ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
1689   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
1690   PetscFunctionReturn(0);
1691 }
1692 
1693 static PetscErrorCode hypre_array_destroy(void *ptr)
1694 {
1695    PetscFunctionBegin;
1696    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
1697    PetscFunctionReturn(0);
1698 }
1699