xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 8ac3aa560d16e76638ff89a78bdd928fd99aa9bc)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscmathypre.h>
7 #include <petsc/private/matimpl.h>
8 #include <../src/mat/impls/hypre/mhypre.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <../src/vec/vec/impls/hypre/vhyp.h>
11 #include <HYPRE.h>
12 #include <HYPRE_utilities.h>
13 #include <_hypre_parcsr_ls.h>
14 #include <_hypre_sstruct_ls.h>
15 
16 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
17 
18 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
19 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
20 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
21 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
22 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
23 static PetscErrorCode hypre_array_destroy(void*);
24 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
25 
26 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
27 {
28   PetscErrorCode ierr;
29   PetscInt       i,n_d,n_o;
30   const PetscInt *ia_d,*ia_o;
31   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
32   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
33 
34   PetscFunctionBegin;
35   if (A_d) { /* determine number of nonzero entries in local diagonal part */
36     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
37     if (done_d) {
38       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
39       for (i=0; i<n_d; i++) {
40         nnz_d[i] = ia_d[i+1] - ia_d[i];
41       }
42     }
43     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
44   }
45   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
46     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
47     if (done_o) {
48       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
49       for (i=0; i<n_o; i++) {
50         nnz_o[i] = ia_o[i+1] - ia_o[i];
51       }
52     }
53     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
54   }
55   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
56     if (!done_o) { /* only diagonal part */
57       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
58       for (i=0; i<n_d; i++) {
59         nnz_o[i] = 0;
60       }
61     }
62     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
63     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
64     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
65   }
66   PetscFunctionReturn(0);
67 }
68 
/*
   Creates (but does not fill) the HYPRE_PARCSR IJ matrix hA->ij with the
   same parallel layout as A.  When A is one of the known (B)AIJ types the
   IJ matrix is also preallocated from A's diagonal/off-diagonal blocks;
   any other type is left unpreallocated.
*/
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  /* make sure the ownership ranges read below are valid */
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre takes inclusive upper bounds, hence the -1 */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      /* sequential matrices have no off-diagonal part */
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  PetscFunctionReturn(0);
}
112 
/*
   Copies the values of A into the already-created IJ matrix ij.
   Fast (direct-memory) paths exist for MPIAIJ (square only) and SeqAIJ;
   everything else goes row by row through HYPRE_IJMatrixSetValues.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  /* the MPIAIJ fast copy shifts diagonal columns by the row ownership start,
     which is only valid for square matrices laid out the usual way */
  if (flg && nr == nc) {
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* generic path: insert each locally owned row with global indices */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    if (ncols) {
      /* NOTE(review): the casts assume HYPRE_Int and PetscInt have the same
         size -- confirm the index widths match in this configuration */
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
    }
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
145 
/*
   Fast copy of a SeqAIJ matrix into the IJ matrix: writes A's CSR arrays
   directly into hypre's diagonal CSR block, bypassing the IJ interface.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  /* NOTE(review): the i/j copies assume HYPRE_Int and PetscInt have the same
     size, and that hypre allocated at least pdiag->nz entries -- confirm */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  /* tell hypre the auxiliary (IJ assembly) structure is not needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
172 
173 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
174 {
175   PetscErrorCode        ierr;
176   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
177   Mat_SeqAIJ            *pdiag,*poffd;
178   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
179   HYPRE_Int             type;
180   hypre_ParCSRMatrix    *par_matrix;
181   hypre_AuxParCSRMatrix *aux_matrix;
182   hypre_CSRMatrix       *hdiag,*hoffd;
183 
184   PetscFunctionBegin;
185   pdiag = (Mat_SeqAIJ*) pA->A->data;
186   poffd = (Mat_SeqAIJ*) pA->B->data;
187   /* cstart is only valid for square MPIAIJ layed out in the usual way */
188   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
189 
190   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
191   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
192   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
193   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
194   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
195   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
196 
197   /*
198        this is the Hack part where we monkey directly with the hypre datastructures
199   */
200   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
201   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
202   jj  = (PetscInt*)hdiag->j;
203   pjj = (PetscInt*)pdiag->j;
204   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
205   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
206   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
207   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
208      If we hacked a hypre a bit more we might be able to avoid this step */
209   jj  = (PetscInt*) hoffd->j;
210   pjj = (PetscInt*) poffd->j;
211   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
212   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
213 
214   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
215   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
216   PetscFunctionReturn(0);
217 }
218 
/*
   Converts a MATHYPRE matrix to MATIS.  The local MATIS matrix is the
   row-wise merge of hypre's diagonal and off-diagonal CSR blocks, with
   off-diagonal columns numbered after the dc diagonal columns (matching
   the column local-to-global map built below).
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, offsets and raw CSR arrays out of the hypre ParCSR */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* columns: dc owned columns first, then the oc ghost columns */
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse: write into the existing local matrix's CSR arrays in place */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk diag and offd row pointers in lockstep,
     appending offd columns shifted by dc, and sort each merged row
     (hypre does not guarantee sorted column indices) */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  for (; cum<dr; cum++) *(++ii) = nnz; /* close any trailing empty rows */
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: make the local matrix own (and later free) our arrays */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
324 
/*
   Converts an AIJ matrix A to MATHYPRE.  For MAT_REUSE_MATRIX the old
   hypre IJ matrix is destroyed and rebuilt from scratch (see comment
   below); for MAT_INPLACE_MATRIX the new matrix replaces A's header.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    /* INITIAL or INPLACE: build a fresh MATHYPRE matrix M */
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)(M->data);
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    /* for in-place conversion *B aliases A (MatConvert convention) */
    ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
  }
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
358 
/*
   Converts a MATHYPRE matrix to (Seq/MPI)AIJ.

   MAT_INITIAL_MATRIX: allocates fresh CSR arrays and copies from hypre.
   MAT_REUSE_MATRIX:   writes into the existing AIJ arrays in place.
   MAT_INPLACE_MATRIX: reuses hypre's own arrays (self-copies below are
                       no-ops) and registers containers so they are later
                       freed with hypre's allocator (see end of function).
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* pick destination arrays for the diagonal block */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    /* alias hypre's own arrays; the memcpys below then copy in place */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort each row: hypre does not guarantee sorted column indices */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    /* same three-way choice for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate compressed offd columns to global indices via the col map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: make *B own (and later free) the arrays built above */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the arrays from hypre so destroying A does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays: attach containers with a
     custom destroy so the hypre-allocated memory is released correctly */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
527 
528 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
529 {
530   hypre_ParCSRMatrix *tA;
531   hypre_CSRMatrix    *hdiag,*hoffd;
532   Mat_SeqAIJ         *diag,*offd;
533   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
534   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
535   PetscBool          ismpiaij,isseqaij;
536   PetscErrorCode     ierr;
537 
538   PetscFunctionBegin;
539   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
540   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
541   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
542   if (ismpiaij) {
543     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
544 
545     diag   = (Mat_SeqAIJ*)a->A->data;
546     offd   = (Mat_SeqAIJ*)a->B->data;
547     garray = a->garray;
548     noffd  = a->B->cmap->N;
549     dnnz   = diag->nz;
550     onnz   = offd->nz;
551   } else {
552     diag   = (Mat_SeqAIJ*)A->data;
553     offd   = NULL;
554     garray = NULL;
555     noffd  = 0;
556     dnnz   = diag->nz;
557     onnz   = 0;
558   }
559 
560   /* create a temporary ParCSR */
561   if (HYPRE_AssumedPartitionCheck()) {
562     PetscMPIInt myid;
563 
564     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
565     row_starts = A->rmap->range + myid;
566     col_starts = A->cmap->range + myid;
567   } else {
568     row_starts = A->rmap->range;
569     col_starts = A->cmap->range;
570   }
571   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
572   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
573   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
574 
575   /* set diagonal part */
576   hdiag = hypre_ParCSRMatrixDiag(tA);
577   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
578   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
579   hypre_CSRMatrixData(hdiag)        = diag->a;
580   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
581   hypre_CSRMatrixSetRownnz(hdiag);
582   hypre_CSRMatrixSetDataOwner(hdiag,0);
583 
584   /* set offdiagonal part */
585   hoffd = hypre_ParCSRMatrixOffd(tA);
586   if (offd) {
587     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
588     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
589     hypre_CSRMatrixData(hoffd)        = offd->a;
590     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
591     hypre_CSRMatrixSetRownnz(hoffd);
592     hypre_CSRMatrixSetDataOwner(hoffd,0);
593     hypre_ParCSRMatrixSetNumNonzeros(tA);
594     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
595   }
596   *hA = tA;
597   PetscFunctionReturn(0);
598 }
599 
600 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
601 {
602   hypre_CSRMatrix    *hdiag,*hoffd;
603 
604   PetscFunctionBegin;
605   hdiag = hypre_ParCSRMatrixDiag(*hA);
606   hoffd = hypre_ParCSRMatrixOffd(*hA);
607   /* set pointers to NULL before destroying tA */
608   hypre_CSRMatrixI(hdiag)           = NULL;
609   hypre_CSRMatrixJ(hdiag)           = NULL;
610   hypre_CSRMatrixData(hdiag)        = NULL;
611   hypre_CSRMatrixI(hoffd)           = NULL;
612   hypre_CSRMatrixJ(hoffd)           = NULL;
613   hypre_CSRMatrixData(hoffd)        = NULL;
614   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
615   hypre_ParCSRMatrixDestroy(*hA);
616   *hA = NULL;
617   PetscFunctionReturn(0);
618 }
619 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember the ownership flags so they can be restored after the call */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}
641 
642 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
643 {
644   Mat                B;
645   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
646   PetscErrorCode     ierr;
647 
648   PetscFunctionBegin;
649   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
650   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
651   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
652   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
653   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
654   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
655   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
656   PetscFunctionReturn(0);
657 }
658 
659 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
660 {
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
665   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
666   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
667   PetscFunctionReturn(0);
668 }
669 
/*
   Numeric PtAP with A in AIJ format and P in MATHYPRE format.
   A is wrapped as ParCSR without copies; the product takes C's type.
*/
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* R = P, so this computes P^T A P */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
697 
/*
   PtAP driver for A AIJ, P MATHYPRE.  On MAT_INITIAL_MATRIX the output
   type is selectable with -matptap_hypre_outtype (default AIJ); the
   numeric phase is always delegated to MatPtAPNumeric_AIJ_HYPRE.
*/
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  /* for MAT_REUSE_MATRIX, *C already carries the ptapnumeric pointer */
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
726 
727 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
728 {
729   Mat                B;
730   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
731   Mat_HYPRE          *hA,*hP;
732   PetscBool          ishypre;
733   HYPRE_Int          type;
734   PetscErrorCode     ierr;
735 
736   PetscFunctionBegin;
737   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
738   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
739   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
740   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
741   hA = (Mat_HYPRE*)A->data;
742   hP = (Mat_HYPRE*)P->data;
743   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
744   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
745   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
746   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
747   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
748   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
749   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
750   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
751   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
752   PetscFunctionReturn(0);
753 }
754 
755 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
756 {
757   PetscErrorCode ierr;
758 
759   PetscFunctionBegin;
760   if (scall == MAT_INITIAL_MATRIX) {
761     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
762     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
763     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
764     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
765     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
766   }
767   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
768   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
769   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 /* calls hypre_ParMatmul
774    hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
775    hypre_ParMatrixCreate does not duplicate the communicator
776    It looks like we don't need to have the diagonal entries
777    ordered first in the rows of the diagonal part
778    for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* hypre_ParMatmul allocates the product matrix; ownership of *hAB passes to the caller */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
787 
788 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
789 {
790   Mat                D;
791   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
792   PetscErrorCode     ierr;
793 
794   PetscFunctionBegin;
795   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
796   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
797   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
798   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
799   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
800   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
801   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
802   PetscFunctionReturn(0);
803 }
804 
805 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
806 {
807   PetscErrorCode ierr;
808 
809   PetscFunctionBegin;
810   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
811   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
812   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
813   PetscFunctionReturn(0);
814 }
815 
816 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
817 {
818   Mat                D;
819   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
820   Mat_HYPRE          *hA,*hB;
821   PetscBool          ishypre;
822   HYPRE_Int          type;
823   PetscErrorCode     ierr;
824 
825   PetscFunctionBegin;
826   ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
827   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
828   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
829   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
830   hA = (Mat_HYPRE*)A->data;
831   hB = (Mat_HYPRE*)B->data;
832   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
833   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
834   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
835   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
836   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
837   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
838   ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
839   ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
840   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
841   ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
842   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
843   PetscFunctionReturn(0);
844 }
845 
846 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
847 {
848   PetscErrorCode ierr;
849 
850   PetscFunctionBegin;
851   if (scall == MAT_INITIAL_MATRIX) {
852     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
853     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
854     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
855     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
856     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
857   }
858   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
859   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
860   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
864 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
865 {
866   Mat                E;
867   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
868   PetscErrorCode     ierr;
869 
870   PetscFunctionBegin;
871   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
872   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
873   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
874   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
875   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
876   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
877   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
878   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
879   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
884 {
885   PetscErrorCode ierr;
886 
887   PetscFunctionBegin;
888   ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
889   ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
/* y = A^T x, implemented by the shared kernel with the transpose flag set */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
901 
/* y = A x, implemented by the shared kernel with the transpose flag cleared */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
910 
/* Shared kernel for MatMult/MatMultTranspose: temporarily points the cached
   hypre vectors hA->x and hA->b at the PETSc vectors' array data, performs
   the hypre matvec, and restores the original hypre pointers afterwards */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose case: the input array ax is viewed through hA->b (hy) and the
       output array ay through hA->x (hx); sax/say hold the saved hypre pointers */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    /* forward case: input goes through hA->x (hx), output through hA->b (hy) */
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  /* NOTE(review): the return codes of the hypre matvec calls are ignored here — confirm intended */
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
942 
943 static PetscErrorCode MatDestroy_HYPRE(Mat A)
944 {
945   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
946   PetscErrorCode ierr;
947 
948   PetscFunctionBegin;
949   if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
950   if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
951   if (hA->ij) {
952     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
953     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
954   }
955   if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
956 
957   ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);
958 
959   ierr = PetscFree(hA->array);CHKERRQ(ierr);
960 
961   ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
962   ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
963   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
964   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
965   ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
966   ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
967   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
968   ierr = PetscFree(A->data);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
/* Default setup: preallocate with the default per-row nonzero estimates
   (see MatHYPRESetPreallocation_HYPRE for what PETSC_DEFAULT means here) */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
980 
/* Completes assembly: drains the off-process stash into the IJ matrix,
   finalizes the hypre assembly, recreates the auxiliary translator so
   further MatSetValues calls remain possible, and lazily creates the
   work vectors used by the mat-vec kernels */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  Vec                x,b;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  /* receive and insert the values stashed for rows owned by this process */
  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* the work vectors are created once, on the first assembly */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1042 
1043 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1044 {
1045   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1046   PetscErrorCode     ierr;
1047 
1048   PetscFunctionBegin;
1049   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1050 
1051   if (hA->size >= size) *array = hA->array;
1052   else {
1053     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1054     hA->size = size;
1055     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1056     *array = hA->array;
1057   }
1058 
1059   hA->available = PETSC_FALSE;
1060   PetscFunctionReturn(0);
1061 }
1062 
/* Releases the scratch buffer obtained with MatGetArray_HYPRE; the memory
   itself stays cached in hA->array for reuse by the next caller */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  *array = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1072 
/* Inserts or adds a logically dense nr x nc block of values into the IJ matrix.
   Negative row/column indices are skipped (PETSc convention); with INSERT_VALUES,
   rows owned by other processes are routed through the stash and delivered at
   assembly time. Uses the matrix's scratch buffer for compacted column indices
   and row values. */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscScalar        *vals = (PetscScalar *)v;
  PetscScalar        *sscr;      /* compacted values for one row */
  PetscInt           *cscr[2];   /* [0]: kept column indices, [1]: their positions in cols[] */
  PetscInt           i,nzc;
  void               *array = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* scratch layout: nc kept columns + nc position indices + nc*nr values */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
  /* drop negative column indices, remembering where each kept column came from */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        /* gather this row's values for the kept columns, then add in one call */
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */

    PetscInt rst,ren;
    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);

    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1129 
1130 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1131 {
1132   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
1133   HYPRE_Int      *hdnnz,*honnz;
1134   PetscInt       i,rs,re,cs,ce,bs;
1135   PetscMPIInt    size;
1136   PetscErrorCode ierr;
1137 
1138   PetscFunctionBegin;
1139   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1140   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1141   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1142   rs   = A->rmap->rstart;
1143   re   = A->rmap->rend;
1144   cs   = A->cmap->rstart;
1145   ce   = A->cmap->rend;
1146   if (!hA->ij) {
1147     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1148     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1149   } else {
1150     HYPRE_Int hrs,hre,hcs,hce;
1151     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1152     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1153     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1154   }
1155   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1156   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1157 
1158   if (!dnnz) {
1159     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1160     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1161   } else {
1162     hdnnz = (HYPRE_Int*)dnnz;
1163   }
1164   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1165   if (size > 1) {
1166     hypre_AuxParCSRMatrix *aux_matrix;
1167     if (!onnz) {
1168       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1169       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1170     } else {
1171       honnz = (HYPRE_Int*)onnz;
1172     }
1173     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1174        they assume the user will input the entire row values, properly sorted
1175        In PETSc, we don't make such an assumption, and we instead set this flag to 1
1176        Also, to avoid possible memory leaks, we destroy and recreate the translator
1177        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1178        the IJ matrix for us */
1179     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1180     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1181     hypre_IJMatrixTranslator(hA->ij) = NULL;
1182     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1183     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1184     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1185   } else {
1186     honnz = NULL;
1187     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1188   }
1189 
1190   /* reset assembled flag and call the initialize method */
1191   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1192   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1193   if (!dnnz) {
1194     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1195   }
1196   if (!onnz && honnz) {
1197     ierr = PetscFree(honnz);CHKERRQ(ierr);
1198   }
1199 
1200   /* Match AIJ logic */
1201   A->preallocated = PETSC_TRUE;
1202   A->assembled    = PETSC_FALSE;
1203   PetscFunctionReturn(0);
1204 }
1205 
1206 /*@C
1207    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1208 
1209    Collective on Mat
1210 
1211    Input Parameters:
1212 +  A - the matrix
1213 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1214           (same value is used for all local rows)
1215 .  dnnz - array containing the number of nonzeros in the various rows of the
1216           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1218           The size of this array is equal to the number of local rows, i.e 'm'.
1219           For matrices that will be factored, you must leave room for (and set)
1220           the diagonal entry even if it is zero.
1221 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1222           submatrix (same value is used for all local rows).
1223 -  onnz - array containing the number of nonzeros in the various rows of the
1224           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1226           structure. The size of this array is equal to the number
1227           of local rows, i.e 'm'.
1228 
1229    Notes:
1230     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1231 
1232    Level: intermediate
1233 
1234 .keywords: matrix, aij, compressed row, sparse, parallel
1235 
1236 .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1237 @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatches to the implementation composed on A (MatHYPRESetPreallocation_HYPRE
     for MATHYPRE); PetscTryMethod makes this a no-op for other matrix types */
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1248 
1249 /*
1250    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1251 
1252    Collective
1253 
1254    Input Parameters:
1255 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1256 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1257 -  copymode - PETSc copying options
1258 
1259    Output Parameter:
1260 .  A  - the matrix
1261 
1262    Level: intermediate
1263 
1264 .seealso: MatHYPRE, PetscCopyMode
1265 */
/* Wraps a hypre_ParCSRMatrix in a PETSc Mat of the requested type (AIJ, IS or
   HYPRE). copymode controls ownership: PETSC_USE_POINTER leaves parcsr owned by
   the caller, PETSC_OWN_POINTER transfers it, PETSC_COPY_VALUES duplicates it
   (only meaningful for the MATHYPRE output type, see below). */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: cloning copies the structure, the
     numerical values are copied explicitly below */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag)*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd)*sizeof(PetscScalar));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER; /* the clone is now ours */
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    /* NOTE(review): pointing hypre at the PETSc layout range array assumes
       HYPRE_Int and PetscInt have the same size — confirm against configure */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1378 
1379 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1380 {
1381   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1382   HYPRE_Int type;
1383 
1384   PetscFunctionBegin;
1385   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1386   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1387   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1388   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1389   PetscFunctionReturn(0);
1390 }
1391 
1392 /*
1393    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1394 
1395    Not collective
1396 
   Input Parameter:
.  A  - the MATHYPRE object
1399 
1400    Output Parameter:
1401 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1402 
1403    Level: intermediate
1404 
1405 .seealso: MatHYPRE, PetscCopyMode
1406 */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* PetscUseMethod errors if A's type has not composed MatHYPREGetParCSR_C
     (i.e. A is not a MATHYPRE matrix) */
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1417 
1418 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1419 {
1420   hypre_ParCSRMatrix *parcsr;
1421   hypre_CSRMatrix    *ha;
1422   PetscInt           rst;
1423   PetscErrorCode     ierr;
1424 
1425   PetscFunctionBegin;
1426   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1427   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1428   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1429   if (missing) *missing = PETSC_FALSE;
1430   if (dd) *dd = -1;
1431   ha = hypre_ParCSRMatrixDiag(parcsr);
1432   if (ha) {
1433     PetscInt  size,i;
1434     HYPRE_Int *ii,*jj;
1435 
1436     size = hypre_CSRMatrixNumRows(ha);
1437     ii   = hypre_CSRMatrixI(ha);
1438     jj   = hypre_CSRMatrixJ(ha);
1439     for (i = 0; i < size; i++) {
1440       PetscInt  j;
1441       PetscBool found = PETSC_FALSE;
1442 
1443       for (j = ii[i]; j < ii[i+1] && !found; j++)
1444         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1445 
1446       if (!found) {
1447         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1448         if (missing) *missing = PETSC_TRUE;
1449         if (dd) *dd = i+rst;
1450         PetscFunctionReturn(0);
1451       }
1452     }
1453     if (!size) {
1454       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1455       if (missing) *missing = PETSC_TRUE;
1456       if (dd) *dd = rst;
1457     }
1458   } else {
1459     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1460     if (missing) *missing = PETSC_TRUE;
1461     if (dd) *dd = rst;
1462   }
1463   PetscFunctionReturn(0);
1464 }
1465 
1466 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1467 {
1468   hypre_ParCSRMatrix *parcsr;
1469   hypre_CSRMatrix    *ha;
1470   PetscErrorCode     ierr;
1471 
1472   PetscFunctionBegin;
1473   ierr  = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1474   /* diagonal part */
1475   ha = hypre_ParCSRMatrixDiag(parcsr);
1476   if (ha) {
1477     PetscInt    size,i;
1478     HYPRE_Int   *ii;
1479     PetscScalar *a;
1480 
1481     size = hypre_CSRMatrixNumRows(ha);
1482     a    = hypre_CSRMatrixData(ha);
1483     ii   = hypre_CSRMatrixI(ha);
1484     for (i = 0; i < ii[size]; i++) a[i] *= s;
1485   }
1486   /* offdiagonal part */
1487   ha = hypre_ParCSRMatrixOffd(parcsr);
1488   if (ha) {
1489     PetscInt    size,i;
1490     HYPRE_Int   *ii;
1491     PetscScalar *a;
1492 
1493     size = hypre_CSRMatrixNumRows(ha);
1494     a    = hypre_CSRMatrixData(ha);
1495     ii   = hypre_CSRMatrixI(ha);
1496     for (i = 0; i < ii[size]; i++) a[i] *= s;
1497   }
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1502 {
1503   hypre_ParCSRMatrix *parcsr;
1504   HYPRE_Int          *lrows;
1505   PetscInt           rst,ren,i;
1506   PetscErrorCode     ierr;
1507 
1508   PetscFunctionBegin;
1509   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1510   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1511   ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
1512   ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1513   for (i=0;i<numRows;i++) {
1514     if (rows[i] < rst || rows[i] >= ren)
1515       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1516     lrows[i] = rows[i] - rst;
1517   }
1518   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1519   ierr = PetscFree(lrows);CHKERRQ(ierr);
1520   PetscFunctionReturn(0);
1521 }
1522 
1523 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1524 {
1525   PetscErrorCode      ierr;
1526 
1527   PetscFunctionBegin;
1528   if (ha) {
1529     HYPRE_Int     *ii, size;
1530     HYPRE_Complex *a;
1531 
1532     size = hypre_CSRMatrixNumRows(ha);
1533     a    = hypre_CSRMatrixData(ha);
1534     ii   = hypre_CSRMatrixI(ha);
1535 
1536     if (a) { ierr = PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex));CHKERRQ(ierr); }
1537   }
1538   PetscFunctionReturn(0);
1539 }
1540 
1541 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1542 {
1543   hypre_ParCSRMatrix  *parcsr;
1544   PetscErrorCode      ierr;
1545 
1546   PetscFunctionBegin;
1547   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1548   /* diagonal part */
1549   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
1550   /* off-diagonal part */
1551   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
1552   PetscFunctionReturn(0);
1553 }
1554 
1555 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
1556 {
1557   PetscInt        ii, jj, ibeg, iend, irow;
1558   PetscInt        *i, *j;
1559   PetscScalar     *a;
1560 
1561   PetscFunctionBegin;
1562   if (!hA) PetscFunctionReturn(0);
1563 
1564   i = (PetscInt*) hypre_CSRMatrixI(hA);
1565   j = (PetscInt*) hypre_CSRMatrixJ(hA);
1566   a = hypre_CSRMatrixData(hA);
1567 
1568   for (ii = 0; ii < N; ii++) {
1569     irow = rows[ii];
1570     ibeg = i[irow];
1571     iend = i[irow+1];
1572     for (jj = ibeg; jj < iend; jj++)
1573       if (j[jj] == irow) a[jj] = diag;
1574       else a[jj] = 0.0;
1575    }
1576    PetscFunctionReturn(0);
1577 }
1578 
1579 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1580 {
1581   hypre_ParCSRMatrix  *parcsr;
1582   PetscInt            *lrows,len;
1583   PetscErrorCode      ierr;
1584 
1585   PetscFunctionBegin;
1586   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size \n");
1587   /* retrieve the internal matrix */
1588   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1589   /* get locally owned rows */
1590   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1591   /* zero diagonal part */
1592   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);CHKERRQ(ierr);
1593   /* zero off-diagonal part */
1594   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);
1595 
1596   ierr = PetscFree(lrows);CHKERRQ(ierr);
1597   PetscFunctionReturn(0);
1598 }
1599 
1600 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1601 {
1602   PetscErrorCode ierr;
1603 
1604   PetscFunctionBegin;
1605   if (mat->nooffprocentries) PetscFunctionReturn(0);
1606 
1607   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
1608   PetscFunctionReturn(0);
1609 }
1610 
/* MATOP_GETROW implementation: exposes one (global) row of the wrapped ParCSR
   matrix through the HYPRE row accessor. Must be paired with
   MatRestoreRow_HYPRE. */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API; NOTE(review): the casts assume PetscInt and HYPRE_Int have
     the same size/representation in this build configuration — confirm */
  PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
  PetscFunctionReturn(0);
}
1623 
/* MATOP_RESTOREROW implementation: releases the row view obtained by
   MatGetRow_HYPRE back to HYPRE. */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API; NOTE(review): the casts assume PetscInt and HYPRE_Int have
     the same size/representation in this build configuration — confirm */
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
  PetscFunctionReturn(0);
}
1636 
1637 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1638 {
1639   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1640   PetscInt  i;
1641 
1642   PetscFunctionBegin;
1643   if (!m || !n) PetscFunctionReturn(0);
1644   /* Ignore negative row indices
1645    * And negative column indices should be automatically ignored in hypre
1646    * */
1647   for (i=0; i<m; i++)
1648     if (idxm[i] >= 0) PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,(HYPRE_Int*)&n,(HYPRE_Int*)&idxm[i],(HYPRE_Int*)idxn,&v[i*n]));
1649   PetscFunctionReturn(0);
1650 }
1651 
1652 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1653 {
1654   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1655 
1656   PetscFunctionBegin;
1657   switch (op)
1658   {
1659   case MAT_NO_OFF_PROC_ENTRIES:
1660     if (flg) {
1661       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1662     }
1663     break;
1664   default:
1665     break;
1666   }
1667   PetscFunctionReturn(0);
1668 }
1669 
1670 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1671 {
1672   hypre_ParCSRMatrix *parcsr;
1673   PetscErrorCode     ierr;
1674   Mat                B;
1675   PetscViewerFormat  format;
1676   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;
1677 
1678   PetscFunctionBegin;
1679   ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
1680   if (format != PETSC_VIEWER_NATIVE) {
1681     ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1682     ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
1683     ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
1684     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr);
1685     ierr = (*mview)(B,view);CHKERRQ(ierr);
1686     ierr = MatDestroy(&B);CHKERRQ(ierr);
1687   } else {
1688     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1689     PetscMPIInt size;
1690     PetscBool   isascii;
1691     const char *filename;
1692 
1693     /* HYPRE uses only text files */
1694     ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
1695     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1696     ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
1697     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1698     ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr);
1699     if (size > 1) {
1700       ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
1701     } else {
1702       ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
1703     }
1704   }
1705   PetscFunctionReturn(0);
1706 }
1707 
1708 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1709 {
1710   hypre_ParCSRMatrix *parcsr;
1711   PetscErrorCode     ierr;
1712   PetscCopyMode      cpmode;
1713 
1714   PetscFunctionBegin;
1715   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1716   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1717     parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
1718     cpmode = PETSC_OWN_POINTER;
1719   } else {
1720     cpmode = PETSC_COPY_VALUES;
1721   }
1722   ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
1723   PetscFunctionReturn(0);
1724 }
1725 
1726 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
1727 {
1728   hypre_ParCSRMatrix *acsr,*bcsr;
1729   PetscErrorCode     ierr;
1730 
1731   PetscFunctionBegin;
1732   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
1733     ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
1734     ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
1735     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
1736     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1737     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1738   } else {
1739     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1740   }
1741   PetscFunctionReturn(0);
1742 }
1743 
/* MATOP_GETDIAGONAL implementation: extract the diagonal of the wrapped
   ParCSR matrix into Vec d. Requires congruent row/column layouts and (in
   debug builds) that no diagonal entry is missing. */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  PetscScalar        *a,*data = NULL;
  PetscInt           i,*diag = NULL;
  PetscBool          cong;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
#if defined(PETSC_USE_DEBUG)
  {
    /* a missing diagonal entry would make data[diag[i]] below read a
       non-diagonal value, so guard against it in debug builds */
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
#endif
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    ierr = VecGetArray(d,&a);CHKERRQ(ierr);
    /* NOTE(review): data[diag[i]] reads the FIRST stored entry of each row of
       the diag block — this relies on hypre's convention of storing the
       diagonal entry first in each row; confirm for this hypre version */
    diag = (PetscInt*)(hypre_CSRMatrixI(dmat));
    data = (PetscScalar*)(hypre_CSRMatrixData(dmat));
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1774 
1775 /*MC
1776    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1777           based on the hypre IJ interface.
1778 
1779    Level: intermediate
1780 
1781 .seealso: MatCreate()
1782 M*/
1783 
/* Constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   type's operation table, creates the stash used for off-process insertions,
   and composes the type-specific conversion/query methods. Called by
   MatSetType()/MatCreate() machinery; B arrives as a bare Mat shell. */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;   /* we own the hypre IJ matrix and must free it */
  hB->available  = PETSC_TRUE;
  hB->size       = 0;
  hB->array      = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;             /* MATHYPRE supports only block size 1 */
  B->assembled  = PETSC_FALSE;

  /* start from a clean ops table, then fill in what MATHYPRE implements */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult            = MatMult_HYPRE;
  B->ops->multtranspose   = MatMultTranspose_HYPRE;
  B->ops->setup           = MatSetUp_HYPRE;
  B->ops->destroy         = MatDestroy_HYPRE;
  B->ops->assemblyend     = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin   = MatAssemblyBegin_HYPRE;
  B->ops->ptap            = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult         = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues       = MatSetValues_HYPRE;
  B->ops->missingdiagonal = MatMissingDiagonal_HYPRE;
  B->ops->scale           = MatScale_HYPRE;
  B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries     = MatZeroEntries_HYPRE;
  B->ops->zerorows        = MatZeroRows_HYPRE;
  B->ops->getrow          = MatGetRow_HYPRE;
  B->ops->restorerow      = MatRestoreRow_HYPRE;
  B->ops->getvalues       = MatGetValues_HYPRE;
  B->ops->setoption       = MatSetOption_HYPRE;
  B->ops->duplicate       = MatDuplicate_HYPRE;
  B->ops->copy            = MatCopy_HYPRE;
  B->ops->view            = MatView_HYPRE;
  B->ops->getdiagonal     = MatGetDiagonal_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* private duplicate of the communicator, handed to hypre */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  /* composed methods looked up by MatConvert/MatPtAP/MatHYPREGetParCSR dispatch */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1838 
/* Destructor callback: frees a hypre-allocated host array via hypre_TFree. */
static PetscErrorCode hypre_array_destroy(void *ptr)
{
   PetscFunctionBegin;
   hypre_TFree(ptr,HYPRE_MEMORY_HOST);
   PetscFunctionReturn(0);
}
1845