xref: /petsc/src/mat/impls/hypre/mhypre.c (revision fbfcfee5110779e3d6a9465ca0a2e0f9a1a6e5e3)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 /*MC
7    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
8           based on the hypre IJ interface.
9 
10    Level: intermediate
11 
12 .seealso: MatCreate()
13 M*/
14 
15 #include <petscmathypre.h>
16 #include <petsc/private/matimpl.h>
17 #include <../src/mat/impls/hypre/mhypre.h>
18 #include <../src/mat/impls/aij/mpi/mpiaij.h>
19 #include <../src/vec/vec/impls/hypre/vhyp.h>
20 #include <HYPRE.h>
21 #include <HYPRE_utilities.h>
22 #include <_hypre_parcsr_ls.h>
23 
24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
29 static PetscErrorCode hypre_array_destroy(void*);
30 
31 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
32 {
33   PetscErrorCode ierr;
34   PetscInt       i,n_d,n_o;
35   const PetscInt *ia_d,*ia_o;
36   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
37   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
38 
39   PetscFunctionBegin;
40   if (A_d) { /* determine number of nonzero entries in local diagonal part */
41     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
42     if (done_d) {
43       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
44       for (i=0; i<n_d; i++) {
45         nnz_d[i] = ia_d[i+1] - ia_d[i];
46       }
47     }
48     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
49   }
50   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
51     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
52     if (done_o) {
53       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
54       for (i=0; i<n_o; i++) {
55         nnz_o[i] = ia_o[i+1] - ia_o[i];
56       }
57     }
58     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
59   }
60   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
61     if (!done_o) { /* only diagonal part */
62       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
63       for (i=0; i<n_d; i++) {
64         nnz_o[i] = 0;
65       }
66     }
67     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
68     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
69     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
70   }
71   PetscFunctionReturn(0);
72 }
73 
74 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
75 {
76   PetscErrorCode ierr;
77   PetscInt       rstart,rend,cstart,cend;
78 
79   PetscFunctionBegin;
80   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
81   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
82   rstart = A->rmap->rstart;
83   rend   = A->rmap->rend;
84   cstart = A->cmap->rstart;
85   cend   = A->cmap->rend;
86   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
87   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
88   {
89     PetscBool      same;
90     Mat            A_d,A_o;
91     const PetscInt *colmap;
92     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
93     if (same) {
94       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
95       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
96       PetscFunctionReturn(0);
97     }
98     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
99     if (same) {
100       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
101       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
102       PetscFunctionReturn(0);
103     }
104     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
105     if (same) {
106       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
107       PetscFunctionReturn(0);
108     }
109     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
110     if (same) {
111       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
112       PetscFunctionReturn(0);
113     }
114   }
115   PetscFunctionReturn(0);
116 }
117 
118 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
119 {
120   PetscErrorCode    ierr;
121   PetscInt          i,rstart,rend,ncols,nr,nc;
122   const PetscScalar *values;
123   const PetscInt    *cols;
124   PetscBool         flg;
125 
126   PetscFunctionBegin;
127   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
128   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
129   if (flg && nr == nc) {
130     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
131     PetscFunctionReturn(0);
132   }
133   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
134   if (flg) {
135     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
136     PetscFunctionReturn(0);
137   }
138 
139   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
140   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
141   for (i=rstart; i<rend; i++) {
142     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
143     if (ncols) {
144       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
145     }
146     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
147   }
148   PetscFunctionReturn(0);
149 }
150 
151 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
152 {
153   PetscErrorCode        ierr;
154   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
155   HYPRE_Int             type;
156   hypre_ParCSRMatrix    *par_matrix;
157   hypre_AuxParCSRMatrix *aux_matrix;
158   hypre_CSRMatrix       *hdiag;
159 
160   PetscFunctionBegin;
161   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
162   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
163   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
164   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
165   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
166   /*
167        this is the Hack part where we monkey directly with the hypre datastructures
168   */
169   ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
170   ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
171   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
172 
173   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
174   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
175   PetscFunctionReturn(0);
176 }
177 
178 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
179 {
180   PetscErrorCode        ierr;
181   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
182   Mat_SeqAIJ            *pdiag,*poffd;
183   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
184   HYPRE_Int             type;
185   hypre_ParCSRMatrix    *par_matrix;
186   hypre_AuxParCSRMatrix *aux_matrix;
187   hypre_CSRMatrix       *hdiag,*hoffd;
188 
189   PetscFunctionBegin;
190   pdiag = (Mat_SeqAIJ*) pA->A->data;
191   poffd = (Mat_SeqAIJ*) pA->B->data;
192   /* cstart is only valid for square MPIAIJ layed out in the usual way */
193   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
194 
195   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
196   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
197   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
198   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
199   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
200   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
201 
202   /*
203        this is the Hack part where we monkey directly with the hypre datastructures
204   */
205   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
206   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
207   jj  = (PetscInt*)hdiag->j;
208   pjj = (PetscInt*)pdiag->j;
209   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
210   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
211   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
212   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
213      If we hacked a hypre a bit more we might be able to avoid this step */
214   jj  = (PetscInt*) hoffd->j;
215   pjj = (PetscInt*) poffd->j;
216   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
217   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
218 
219   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
220   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
221   PetscFunctionReturn(0);
222 }
223 
/*
   Converts a MATHYPRE matrix (ParCSR storage) to MATIS.

   The local MATIS matrix is the horizontal concatenation of the hypre
   diagonal and off-diagonal CSR blocks: diagonal columns keep their local
   index [0,dc), off-diagonal columns are appended after them at dc+[0,oc).
   The column local-to-global map is built accordingly from the column
   ownership start and hypre's col_map_offd.

   With MAT_REUSE_MATRIX the existing local matrix of *B is overwritten in
   place; otherwise a new MATIS is created. MAT_INPLACE_MATRIX additionally
   replaces A's header with the result.

   NOTE(review): in the reuse branch, MatGetRowIJ/MatSeqAIJGetArray are
   called on the local matrix but never restored — presumably relying on the
   arrays staying valid while the values are overwritten; confirm this is
   intentional.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, starts and raw CSR arrays out of the ParCSR object */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* first the owned (diagonal) columns, then the ghost (offd) columns */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write straight into the existing local matrix's CSR arrays */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk diag and offd row by row, appending offd
     columns after the dc diagonal columns, then sort each row */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; } /* diag: local column index kept */
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } /* offd: shifted past the diag columns */
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* close off any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of our arrays so the local matrix frees them */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
329 
330 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
331 {
332   Mat_HYPRE      *hB;
333   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
334   PetscErrorCode ierr;
335 
336   PetscFunctionBegin;
337   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
338   if (reuse == MAT_REUSE_MATRIX) {
339     /* always destroy the old matrix and create a new memory;
340        hope this does not churn the memory too much. The problem
341        is I do not know if it is possible to put the matrix back to
342        its initial state so that we can directly copy the values
343        the second time through. */
344     hB = (Mat_HYPRE*)((*B)->data);
345     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
346   } else {
347     ierr = MatCreate(comm,B);CHKERRQ(ierr);
348     ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
349     ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
350     hB   = (Mat_HYPRE*)((*B)->data);
351   }
352   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
353   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
354   (*B)->preallocated = PETSC_TRUE;
355   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
356   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
357   PetscFunctionReturn(0);
358 }
359 
/*
   Converts a MATHYPRE matrix (ParCSR storage) to (Seq/MPI)AIJ.

   MAT_INITIAL_MATRIX allocates fresh CSR arrays and hands ownership to the
   new AIJ matrix; MAT_REUSE_MATRIX writes into the CSR arrays of the
   existing *B; MAT_INPLACE_MATRIX reuses hypre's own arrays, steals them
   from the hypre CSR structures, and replaces A's header.

   NOTE(review): in the MAT_INPLACE_MATRIX branch dii/djj/da alias hypre's
   own arrays, so the PetscMemcpy calls below copy a buffer onto itself —
   presumably harmless but worth confirming.
   NOTE(review): MatGetRowIJ/MatSeqAIJGetArray in the reuse branches are
   never restored; confirm this matches the reuse contract used elsewhere
   in this file (see MatConvert_HYPRE_IS).
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the CSR buffers for the diagonal part, depending on reuse mode */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    /* alias hypre's own arrays; they are stolen from hypre further below */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort column indices within each row, permuting the values accordingly */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    /* same as above for the off-diagonal part */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate offd columns to global indices through hypre's col map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: hand ownership of the arrays to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* steal the arrays from hypre so destroying the ParCSR does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: hand ownership of the arrays to the new matrix */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach the hypre-allocated arrays to *B via containers whose destroy
       callback releases them with hypre's allocator */
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
528 
529 #undef __FUNCT__
530 #define __FUNCT__ "MatAIJGetParCSR_Private"
531 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
532 {
533   hypre_ParCSRMatrix *tA;
534   hypre_CSRMatrix    *hdiag,*hoffd;
535   Mat_SeqAIJ         *diag,*offd;
536   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
537   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
538   PetscBool          ismpiaij,isseqaij;
539   PetscErrorCode     ierr;
540 
541   PetscFunctionBegin;
542   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
543   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
544   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
545   if (ismpiaij) {
546     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
547 
548     diag   = (Mat_SeqAIJ*)a->A->data;
549     offd   = (Mat_SeqAIJ*)a->B->data;
550     garray = a->garray;
551     noffd  = a->B->cmap->N;
552     dnnz   = diag->nz;
553     onnz   = offd->nz;
554   } else {
555     diag   = (Mat_SeqAIJ*)A->data;
556     offd   = NULL;
557     garray = NULL;
558     noffd  = 0;
559     dnnz   = diag->nz;
560     onnz   = 0;
561   }
562 
563   /* create a temporary ParCSR */
564   if (HYPRE_AssumedPartitionCheck()) {
565     PetscMPIInt myid;
566 
567     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
568     row_starts = A->rmap->range + myid;
569     col_starts = A->cmap->range + myid;
570   } else {
571     row_starts = A->rmap->range;
572     col_starts = A->cmap->range;
573   }
574   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
575   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
576   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
577 
578   /* set diagonal part */
579   hdiag = hypre_ParCSRMatrixDiag(tA);
580   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
581   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
582   hypre_CSRMatrixData(hdiag)        = diag->a;
583   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
584   hypre_CSRMatrixSetRownnz(hdiag);
585   hypre_CSRMatrixSetDataOwner(hdiag,0);
586 
587   /* set offdiagonal part */
588   hoffd = hypre_ParCSRMatrixOffd(tA);
589   if (offd) {
590     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
591     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
592     hypre_CSRMatrixData(hoffd)        = offd->a;
593     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
594     hypre_CSRMatrixSetRownnz(hoffd);
595     hypre_CSRMatrixSetDataOwner(hoffd,0);
596     hypre_ParCSRMatrixSetNumNonzeros(tA);
597     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
598   }
599   *hA = tA;
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "MatAIJRestoreParCSR_Private"
605 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
606 {
607   hypre_CSRMatrix    *hdiag,*hoffd;
608 
609   PetscFunctionBegin;
610   hdiag = hypre_ParCSRMatrixDiag(*hA);
611   hoffd = hypre_ParCSRMatrixOffd(*hA);
612   /* set pointers to NULL before destroying tA */
613   hypre_CSRMatrixI(hdiag)           = NULL;
614   hypre_CSRMatrixJ(hdiag)           = NULL;
615   hypre_CSRMatrixData(hdiag)        = NULL;
616   hypre_CSRMatrixI(hoffd)           = NULL;
617   hypre_CSRMatrixJ(hoffd)           = NULL;
618   hypre_CSRMatrixData(hoffd)        = NULL;
619   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
620   hypre_ParCSRMatrixDestroy(*hA);
621   *hA = NULL;
622   PetscFunctionReturn(0);
623 }
624 
625 /* calls RAP from BoomerAMG:
626    the resulting ParCSR will not own the column and row starts
627    It looks like we don't need to have the diagonal entries
628    ordered first in the rows of the diagonal part
629    for boomerAMGBuildCoarseOperator to work */
630 #undef __FUNCT__
631 #define __FUNCT__ "MatHYPRE_ParCSR_RAP"
632 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,
633                                           hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
634 {
635   PetscErrorCode ierr;
636   HYPRE_Int      P_owns_col_starts,R_owns_row_starts;
637 
638   PetscFunctionBegin;
639   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
640   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
641   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
642   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
643   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
644   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
645   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
646   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
647   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
648   PetscFunctionReturn(0);
649 }
650 
651 #undef __FUNCT__
652 #define __FUNCT__ "MatPtAPNumeric_AIJ_AIJ_wHYPRE"
653 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
654 {
655   Mat                B;
656   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
657   PetscErrorCode     ierr;
658 
659   PetscFunctionBegin;
660   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
661   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
662   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
663   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
664   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
665   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
666   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "MatPtAPSymbolic_AIJ_AIJ_wHYPRE"
672 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
673 {
674   PetscErrorCode     ierr;
675   PetscFunctionBegin;
676   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
677   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
678   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
679   PetscFunctionReturn(0);
680 }
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "MatPtAPNumeric_AIJ_HYPRE"
684 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
685 {
686   Mat                B;
687   Mat_HYPRE          *hP;
688   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
689   HYPRE_Int          type;
690   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
691   PetscBool          ishypre;
692   PetscErrorCode     ierr;
693 
694   PetscFunctionBegin;
695   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
696   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
697   hP = (Mat_HYPRE*)P->data;
698   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
699   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
700   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
701 
702   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
703   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
704   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
705 
706   /* create temporary matrix and merge to C */
707   ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
708   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
709   PetscFunctionReturn(0);
710 }
711 
712 #undef __FUNCT__
713 #define __FUNCT__ "MatPtAP_AIJ_HYPRE"
714 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
715 {
716   PetscErrorCode ierr;
717 
718   PetscFunctionBegin;
719   if (scall == MAT_INITIAL_MATRIX) {
720     const char *deft = MATAIJ;
721     char       type[256];
722     PetscBool  flg;
723 
724     ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
725     ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
726     ierr = PetscOptionsEnd();CHKERRQ(ierr);
727     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
728     ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
729     if (flg) {
730       ierr = MatSetType(*C,type);CHKERRQ(ierr);
731     } else {
732       ierr = MatSetType(*C,deft);CHKERRQ(ierr);
733     }
734     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
735     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
736   }
737   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
738   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
739   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
740   PetscFunctionReturn(0);
741 }
742 
743 #undef __FUNCT__
744 #define __FUNCT__ "MatPtAPNumeric_HYPRE_HYPRE"
745 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
746 {
747   Mat                B;
748   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
749   Mat_HYPRE          *hA,*hP;
750   PetscBool          ishypre;
751   HYPRE_Int          type;
752   PetscErrorCode     ierr;
753 
754   PetscFunctionBegin;
755   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
756   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
757   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
758   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
759   hA = (Mat_HYPRE*)A->data;
760   hP = (Mat_HYPRE*)P->data;
761   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
762   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
763   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
764   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
765   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
766   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
767   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
768   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
769   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 #undef __FUNCT__
774 #define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
775 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
776 {
777   PetscErrorCode ierr;
778 
779   PetscFunctionBegin;
780   if (scall == MAT_INITIAL_MATRIX) {
781     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
782     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
783     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
784     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
785     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
786   }
787   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
788   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
789   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
790   PetscFunctionReturn(0);
791 }
792 
793 /* calls hypre_ParMatmul
794    hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
795    hypre_ParMatrixCreate does not duplicate the communicator
796    It looks like we don't need to have the diagonal entries
797    ordered first in the rows of the diagonal part
798    for boomerAMGBuildCoarseOperator to work */
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_ParCSR_MatMatMult"
/* Thin wrapper over hypre_ParMatmul: computes *hAB = hA * hB.
   The returned ParCSR matrix is newly allocated by hypre; the caller owns it. */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
809 
810 #undef __FUNCT__
811 #define __FUNCT__ "MatMatMultNumeric_AIJ_AIJ_wHYPRE"
812 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
813 {
814   Mat                D;
815   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
816   PetscErrorCode     ierr;
817 
818   PetscFunctionBegin;
819   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
820   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
821   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
822   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
823   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
824   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
825   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
826   PetscFunctionReturn(0);
827 }
828 
829 #undef __FUNCT__
830 #define __FUNCT__ "MatMatMultSymbolic_AIJ_AIJ_wHYPRE"
831 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
832 {
833   PetscErrorCode ierr;
834 
835   PetscFunctionBegin;
836   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
837   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
838   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "MatMatMultNumeric_HYPRE_HYPRE"
844 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
845 {
846   Mat                D;
847   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
848   Mat_HYPRE          *hA,*hB;
849   PetscBool          ishypre;
850   HYPRE_Int          type;
851   PetscErrorCode     ierr;
852 
853   PetscFunctionBegin;
854   ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
855   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
856   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
857   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
858   hA = (Mat_HYPRE*)A->data;
859   hB = (Mat_HYPRE*)B->data;
860   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
861   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
862   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
863   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
864   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
865   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
866   ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
867   ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
868   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
869   ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
870   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
871   PetscFunctionReturn(0);
872 }
873 
874 #undef __FUNCT__
875 #define __FUNCT__ "MatMatMult_HYPRE_HYPRE"
876 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
877 {
878   PetscErrorCode ierr;
879 
880   PetscFunctionBegin;
881   if (scall == MAT_INITIAL_MATRIX) {
882     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
883     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
884     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
885     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
886     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
887   }
888   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
889   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
890   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
891   PetscFunctionReturn(0);
892 }
893 
894 
895 #undef __FUNCT__
896 #define __FUNCT__ "MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE"
897 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
898 {
899   Mat                E;
900   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
901   PetscErrorCode     ierr;
902 
903   PetscFunctionBegin;
904   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
905   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
906   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
907   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
908   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
909   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
910   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
911   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
912   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
913   PetscFunctionReturn(0);
914 }
915 
916 #undef __FUNCT__
917 #define __FUNCT__ "MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE"
918 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
919 {
920   PetscErrorCode ierr;
921 
922   PetscFunctionBegin;
923   ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
924   ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
/* y = A^T * x, delegated to the shared kernel with trans = PETSC_TRUE */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
938 
/* y = A * x, delegated to the shared kernel with trans = PETSC_FALSE */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
947 
/* Shared kernel for MatMult/MatMultTranspose.
   Avoids copies by temporarily pointing the cached hypre vectors hA->x / hA->b
   at the arrays of the PETSc vectors x and y, calling the hypre matvec, and
   then restoring the saved hypre pointers. The swap/restore pairs must stay
   in this exact order. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say; /* PETSc arrays and the saved hypre pointers they replace */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose product: input attaches to hA->b (row space), output to hA->x (column space) */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
979 
/* Destroys a MATHYPRE matrix: the auxiliary IJ vectors, the IJ matrix
   (detaching the inner ParCSR object first when it is not owned), the
   duplicated communicator, the composed methods, and the data struct. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* when the ParCSR object is not owned, detach it so HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  /* remove the methods composed in MatCreate_HYPRE */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1002 
/* Default setup: preallocate with default (PETSC_DEFAULT) per-row estimates */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1011 
1012 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1013 {
1014   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1015   Vec                x,b;
1016   PetscErrorCode     ierr;
1017 
1018   PetscFunctionBegin;
1019   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
1020   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
1021   if (hA->x) PetscFunctionReturn(0);
1022   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1023   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1024   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
1025   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
1026   ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
1027   ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
1028   ierr = VecDestroy(&x);CHKERRQ(ierr);
1029   ierr = VecDestroy(&b);CHKERRQ(ierr);
1030   PetscFunctionReturn(0);
1031 }
1032 
1033 #define MATHYPRE_SCRATCH 2048
1034 
1035 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1036 {
1037   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1038   PetscScalar        *vals = (PetscScalar *)v;
1039   PetscScalar        sscr[MATHYPRE_SCRATCH];
1040   HYPRE_Int          cscr[2][MATHYPRE_SCRATCH];
1041   HYPRE_Int          i,nzc;
1042   PetscErrorCode     ierr;
1043 
1044   PetscFunctionBegin;
1045   for (i=0,nzc=0;i<nc;i++) {
1046     if (cols[i] >= 0) {
1047       cscr[0][nzc  ] = cols[i];
1048       cscr[1][nzc++] = i;
1049     }
1050   }
1051   if (!nzc) PetscFunctionReturn(0);
1052 
1053   if (ins == ADD_VALUES) {
1054     for (i=0;i<nr;i++) {
1055       if (rows[i] >= 0 && nzc) {
1056         PetscInt j;
1057         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1058         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1059       }
1060       vals += nc;
1061     }
1062   } else { /* INSERT_VALUES */
1063 #if defined(PETSC_USE_DEBUG)
1064     /* Insert values cannot be used to insert offproc entries */
1065     PetscInt rst,ren;
1066     ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1067     for (i=0;i<nr;i++)
1068       if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead");
1069 #endif
1070     for (i=0;i<nr;i++) {
1071       if (rows[i] >= 0 && nzc) {
1072         PetscInt j;
1073         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1074         PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1075       }
1076       vals += nc;
1077     }
1078   }
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1083 {
1084   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1085   HYPRE_Int          *hdnnz,*honnz;
1086   PetscInt           i,rs,re,cs,ce,bs;
1087   PetscMPIInt        size;
1088   PetscErrorCode     ierr;
1089 
1090   PetscFunctionBegin;
1091   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1092   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1093   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1094   rs   = A->rmap->rstart;
1095   re   = A->rmap->rend;
1096   cs   = A->cmap->rstart;
1097   ce   = A->cmap->rend;
1098   if (!hA->ij) {
1099     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1100     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1101   } else {
1102     HYPRE_Int hrs,hre,hcs,hce;
1103     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1104     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1105     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1106   }
1107   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1108 
1109   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1110   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1111 
1112   if (!dnnz) {
1113     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1114     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1115   } else {
1116     hdnnz = (HYPRE_Int*)dnnz;
1117   }
1118   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1119   if (size > 1) {
1120     if (!onnz) {
1121       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1122       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1123     } else {
1124       honnz = (HYPRE_Int*)onnz;
1125     }
1126     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1127   } else {
1128     honnz = NULL;
1129     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1130   }
1131   if (!dnnz) {
1132     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1133   }
1134   if (!onnz && honnz) {
1135     ierr = PetscFree(honnz);CHKERRQ(ierr);
1136   }
1137   A->preallocated = PETSC_TRUE;
1138 
1139   /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
1140   {
1141     hypre_AuxParCSRMatrix *aux_matrix;
1142     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1143     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1144   }
1145   PetscFunctionReturn(0);
1146 }
1147 
1148 /*@C
1149    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1150 
1151    Collective on Mat
1152 
1153    Input Parameters:
1154 +  A - the matrix
1155 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1156           (same value is used for all local rows)
1157 .  dnnz - array containing the number of nonzeros in the various rows of the
1158           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1160           The size of this array is equal to the number of local rows, i.e 'm'.
1161           For matrices that will be factored, you must leave room for (and set)
1162           the diagonal entry even if it is zero.
1163 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1164           submatrix (same value is used for all local rows).
1165 -  onnz - array containing the number of nonzeros in the various rows of the
1166           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1168           structure. The size of this array is equal to the number
1169           of local rows, i.e 'm'.
1170 
1171    Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1172 
1173    Level: intermediate
1174 
1175 .keywords: matrix, aij, compressed row, sparse, parallel
1176 
1177 .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE
1178 @*/
/* Public entry point: dispatches to the implementation registered as
   "MatHYPRESetPreallocation_C" (a no-op for types that do not provide it) */
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1189 
1190 /*
1191    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1192 
1193    Collective
1194 
1195    Input Parameters:
1196 +  vparcsr  - the pointer to the hypre_ParCSRMatrix
1197 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1198 -  copymode - PETSc copying options
1199 
1200    Output Parameter:
1201 .  A  - the matrix
1202 
1203    Level: intermediate
1204 
1205 .seealso: MatHYPRE, PetscCopyMode
1206 */
/* Wraps (or converts) a hypre_ParCSRMatrix into a PETSc Mat of type mtype.
   Ownership of parcsr follows copymode; PETSC_COPY_VALUES is rejected for
   MATHYPRE output. */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  hypre_ParCSRMatrix    *parcsr;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  /* classify the requested output type */
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object: T wraps parcsr without copying its data */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    /* borrow the starts arrays from the PETSc layouts (valid while T lives) */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1301 
1302 PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1303 {
1304   Mat_HYPRE*            hA = (Mat_HYPRE*)A->data;
1305   HYPRE_Int             type;
1306   PetscErrorCode        ierr;
1307 
1308   PetscFunctionBegin;
1309   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1310   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1311   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1312   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1313   PetscFunctionReturn(0);
1314 }
1315 
1316 /*
1317    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1318 
1319    Not collective
1320 
1321    Input Parameters:
1322 +  A  - the MATHYPRE object
1323 
1324    Output Parameter:
1325 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1326 
1327    Level: intermediate
1328 
1329 .seealso: MatHYPRE, PetscCopyMode
1330 */
/* Public accessor: dispatches to the implementation composed as
   "MatHYPREGetParCSR_C"; PetscUseMethod errors if A does not provide it */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1341 
/* MATHYPRE constructor: allocates the per-matrix data, installs the ops
   table, duplicates the communicator for the hypre objects, and composes
   the type-specific methods */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE; /* by default the IJ matrix owns its inner ParCSR object */

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  B->ops->mult          = MatMult_HYPRE;
  B->ops->multtranspose = MatMultTranspose_HYPRE;
  B->ops->setup         = MatSetUp_HYPRE;
  B->ops->destroy       = MatDestroy_HYPRE;
  B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
  B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult       = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues     = MatSetValues_HYPRE;

  /* the hypre objects use a private duplicate of the communicator (freed in MatDestroy_HYPRE) */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1374 
1375 static PetscErrorCode hypre_array_destroy(void *ptr)
1376 {
1377    PetscFunctionBegin;
1378    hypre_TFree(ptr);
1379    PetscFunctionReturn(0);
1380 }
1381 
#if 0
/* NOTE(review): dead code, excluded from compilation by the #if 0 guard above;
   kept only as a reference for a future zero-copy MPIAIJ -> IJMatrix link */
/*
    Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

    This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
    which will corrupt the PETSc data structure if we did this. Need a work around to this problem.
*/
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
1427