xref: /petsc/src/mat/impls/is/matis.c (revision 7230de76bc58b08aad1041ea2007176978440db2)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4     This stores the matrices in globally unassembled form. Each processor
5     assembles only its local Neumann problem and the parallel matrix vector
6     product is handled "implicitly".
7 
8     Currently this allows for only one subdomain per processor.
9 */
10 
11 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
12 
13 static PetscErrorCode MatISComputeSF_Private(Mat);
14 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);
15 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/* Builds the star forest (SF) connecting the locally owned rows of the global
   matrix B (roots) with the rows of the local subdomain matrix matis->A
   (leaves), and allocates the matching rootdata/leafdata work arrays used by
   the callers for broadcasts and reductions over shared rows. */
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: every row of the sequential local matrix */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  /* roots: the rows of B owned by this process */
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the row local-to-global map gives, for each leaf, its global root index */
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* work arrays sized to roots/leaves; freed in MatDestroy_IS */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "MatISSetPreallocation"
38 /*@
39    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
40 
41    Collective on MPI_Comm
42 
43    Input Parameters:
44 +  B - the matrix
45 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
46            (same value is used for all local rows)
47 .  d_nnz - array containing the number of nonzeros in the various rows of the
48            DIAGONAL portion of the local submatrix (possibly different for each row)
49            or NULL, if d_nz is used to specify the nonzero structure.
50            The size of this array is equal to the number of local rows, i.e 'm'.
51            For matrices that will be factored, you must leave room for (and set)
52            the diagonal entry even if it is zero.
53 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
54            submatrix (same value is used for all local rows).
55 -  o_nnz - array containing the number of nonzeros in the various rows of the
56            OFF-DIAGONAL portion of the local submatrix (possibly different for
57            each row) or NULL, if o_nz is used to specify the nonzero
58            structure. The size of this array is equal to the number
59            of local rows, i.e 'm'.
60 
61    If the *_nnz parameter is given then the *_nz parameter is ignored
62 
63    Level: intermediate
64 
65    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
67           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
68 
69 .keywords: matrix
70 
71 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
72 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation; silently a no-op if B is
     not a MATIS (PetscTryMethod does not error on a missing method) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
83 
84 #undef __FUNCT__
85 #define __FUNCT__ "MatISSetPreallocation_IS"
86 static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
87 {
88   Mat_IS         *matis = (Mat_IS*)(B->data);
89   PetscInt       bs,i,nlocalcols;
90   PetscErrorCode ierr;
91 
92   PetscFunctionBegin;
93   if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
94   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
95     ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
96   }
97   if (!d_nnz) {
98     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
99   } else {
100     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
101   }
102   if (!o_nnz) {
103     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
104   } else {
105     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
106   }
107   ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
108   ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
109   ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
110   ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
111   for (i=0;i<matis->sf_nleaves;i++) {
112     matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
113   }
114   ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
115   for (i=0;i<matis->sf_nleaves/bs;i++) {
116     matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
117   }
118   ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
119   for (i=0;i<matis->sf_nleaves/bs;i++) {
120     matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
121   }
122   ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
123   PetscFunctionReturn(0);
124 }
125 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Computes and sets the preallocation of the assembled parallel matrix B from
   the unassembled MATIS matrix A.

   Each process counts, for every row of its local matrix, the entries falling
   in the diagonal and off-diagonal blocks of the owning process (my_dnz/my_onz);
   these per-subdomain counts are then combined on the owned rows through the SF
   (with MPI_SUM, or MPI_MAX if maxreduce is true) and used to preallocate B
   for the AIJ, BAIJ and SBAIJ formats. The result is an overestimate in
   general (see note below). */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  /* row and column local-to-global maps may coincide; avoid fetching twice */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  /* SBAIJ stores the upper triangle only; enable row traversal of it */
  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  /* row_ownership[g] = rank owning global row g */
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on mat_ranges describes the COLUMN ownership ranges */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* every entry of the (assumed square) local dense matrix is nonzero;
       traverse the upper triangle and count (i,j) and (j,i) together */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ rows only expose the upper triangle: credit the symmetric entry */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compact point-wise counts into block-wise counts for the blocked formats */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
269 
270 #undef __FUNCT__
271 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
272 static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
273 {
274   Mat_IS         *matis = (Mat_IS*)(mat->data);
275   Mat            local_mat;
276   /* info on mat */
277   PetscInt       bs,rows,cols,lrows,lcols;
278   PetscInt       local_rows,local_cols;
279   PetscBool      isdense,issbaij,isseqaij;
280   PetscMPIInt    nsubdomains;
281   /* values insertion */
282   PetscScalar    *array;
283   /* work */
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   /* get info from mat */
288   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
289   if (nsubdomains == 1) {
290     if (reuse == MAT_INITIAL_MATRIX) {
291       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
292     } else {
293       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
294     }
295     PetscFunctionReturn(0);
296   }
297   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
298   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
299   ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
300   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
301   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
302   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
303   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
304 
305   if (reuse == MAT_INITIAL_MATRIX) {
306     MatType     new_mat_type;
307     PetscBool   issbaij_red;
308 
309     /* determining new matrix type */
310     ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
311     if (issbaij_red) {
312       new_mat_type = MATSBAIJ;
313     } else {
314       if (bs>1) {
315         new_mat_type = MATBAIJ;
316       } else {
317         new_mat_type = MATAIJ;
318       }
319     }
320 
321     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
322     ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
323     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
324     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
325     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
326   } else {
327     PetscInt mbs,mrows,mcols,mlrows,mlcols;
328     /* some checks */
329     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
330     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
331     ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
332     if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
333     if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
334     if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
335     if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
336     if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
337     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
338   }
339 
340   if (issbaij) {
341     ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
342   } else {
343     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
344     local_mat = matis->A;
345   }
346 
347   /* Set values */
348   ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
349   if (isdense) { /* special case for dense local matrices */
350     PetscInt i,*dummy_rows,*dummy_cols;
351 
352     if (local_rows != local_cols) {
353       ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
354       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
355       for (i=0;i<local_cols;i++) dummy_cols[i] = i;
356     } else {
357       ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
358       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
359       dummy_cols = dummy_rows;
360     }
361     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
362     ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
363     ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
364     ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
365     if (dummy_rows != dummy_cols) {
366       ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
367     } else {
368       ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
369     }
370   } else if (isseqaij) {
371     PetscInt  i,nvtxs,*xadj,*adjncy;
372     PetscBool done;
373 
374     ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
375     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
376     ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
377     for (i=0;i<nvtxs;i++) {
378       ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
379     }
380     ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
381     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
382     ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
383   } else { /* very basic values insertion for all other matrix types */
384     PetscInt i;
385 
386     for (i=0;i<local_rows;i++) {
387       PetscInt       j;
388       const PetscInt *local_indices_cols;
389 
390       ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
391       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
392       ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
393     }
394   }
395   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
396   ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
397   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
398   if (isdense) {
399     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
400   }
401   PetscFunctionReturn(0);
402 }
403 
404 #undef __FUNCT__
405 #define __FUNCT__ "MatISGetMPIXAIJ"
406 /*@
407     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
408 
  Input Parameters:
410 .  mat - the matrix (should be of type MATIS)
411 .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
412 
413   Output Parameter:
414 .  newmat - the matrix in AIJ format
415 
416   Level: developer
417 
418   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
419 
420 .seealso: MATIS
421 @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, *newmat must already be a valid matrix on the same
       communicator and distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* PetscUseMethod errors if mat does not provide the conversion (not a MATIS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
438 
439 #undef __FUNCT__
440 #define __FUNCT__ "MatDuplicate_IS"
441 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
442 {
443   PetscErrorCode ierr;
444   Mat_IS         *matis = (Mat_IS*)(mat->data);
445   PetscInt       bs,m,n,M,N;
446   Mat            B,localmat;
447 
448   PetscFunctionBegin;
449   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
450   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
451   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
452   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
453   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
454   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
455   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
456   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
457   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
458   *newmat = B;
459   PetscFunctionReturn(0);
460 }
461 
462 #undef __FUNCT__
463 #define __FUNCT__ "MatIsHermitian_IS"
464 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
465 {
466   PetscErrorCode ierr;
467   Mat_IS         *matis = (Mat_IS*)A->data;
468   PetscBool      local_sym;
469 
470   PetscFunctionBegin;
471   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
472   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
473   PetscFunctionReturn(0);
474 }
475 
476 #undef __FUNCT__
477 #define __FUNCT__ "MatIsSymmetric_IS"
478 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
479 {
480   PetscErrorCode ierr;
481   Mat_IS         *matis = (Mat_IS*)A->data;
482   PetscBool      local_sym;
483 
484   PetscFunctionBegin;
485   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
486   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
487   PetscFunctionReturn(0);
488 }
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "MatDestroy_IS"
492 PetscErrorCode MatDestroy_IS(Mat A)
493 {
494   PetscErrorCode ierr;
495   Mat_IS         *b = (Mat_IS*)A->data;
496 
497   PetscFunctionBegin;
498   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
499   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
500   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
501   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
502   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
503   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
504   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
505   ierr = PetscFree(A->data);CHKERRQ(ierr);
506   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
507   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
508   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
509   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
510   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
511   PetscFunctionReturn(0);
512 }
513 
514 #undef __FUNCT__
515 #define __FUNCT__ "MatMult_IS"
516 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
517 {
518   PetscErrorCode ierr;
519   Mat_IS         *is  = (Mat_IS*)A->data;
520   PetscScalar    zero = 0.0;
521 
522   PetscFunctionBegin;
523   /*  scatter the global vector x into the local work vector */
524   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
525   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
526 
527   /* multiply the local matrix */
528   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
529 
530   /* scatter product back into global memory */
531   ierr = VecSet(y,zero);CHKERRQ(ierr);
532   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
533   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
534   PetscFunctionReturn(0);
535 }
536 
537 #undef __FUNCT__
538 #define __FUNCT__ "MatMultAdd_IS"
539 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
540 {
541   Vec            temp_vec;
542   PetscErrorCode ierr;
543 
544   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
545   if (v3 != v2) {
546     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
547     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
548   } else {
549     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
550     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
551     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
552     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
553     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
554   }
555   PetscFunctionReturn(0);
556 }
557 
558 #undef __FUNCT__
559 #define __FUNCT__ "MatMultTranspose_IS"
560 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
561 {
562   Mat_IS         *is = (Mat_IS*)A->data;
563   PetscErrorCode ierr;
564 
565   PetscFunctionBegin;
566   /*  scatter the global vector x into the local work vector */
567   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
568   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
569 
570   /* multiply the local matrix */
571   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
572 
573   /* scatter product back into global vector */
574   ierr = VecSet(x,0);CHKERRQ(ierr);
575   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
576   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "MatMultTransposeAdd_IS"
582 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
583 {
584   Vec            temp_vec;
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
588   if (v3 != v2) {
589     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
590     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
591   } else {
592     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
593     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
594     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
595     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
596     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
597   }
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "MatView_IS"
603 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
604 {
605   Mat_IS         *a = (Mat_IS*)A->data;
606   PetscErrorCode ierr;
607   PetscViewer    sviewer;
608 
609   PetscFunctionBegin;
610   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
611   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
612   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
613   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
614   PetscFunctionReturn(0);
615 }
616 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the row/column local-to-global mappings of a MATIS and (re)builds
   the derived data: the sequential local matrix is->A, its work vectors, and
   the global-to-local scatters used by the multiplication routines. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: sequential, sized by the mappings, with a
     user-customizable type/prefix (-is_ options) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same mapping for rows and columns: share the scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
683 
684 #undef __FUNCT__
685 #define __FUNCT__ "MatSetValues_IS"
686 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
687 {
688   Mat_IS         *is = (Mat_IS*)mat->data;
689   PetscInt       rows_l[2048],cols_l[2048];
690   PetscErrorCode ierr;
691 
692   PetscFunctionBegin;
693 #if defined(PETSC_USE_DEBUG)
694   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
695 #endif
696   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
697   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
698   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
699   PetscFunctionReturn(0);
700 }
701 
702 #undef __FUNCT__
703 #define __FUNCT__ "MatSetValuesLocal_IS"
704 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
705 {
706   PetscErrorCode ierr;
707   Mat_IS         *is = (Mat_IS*)A->data;
708 
709   PetscFunctionBegin;
710   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 #undef __FUNCT__
715 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
716 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
717 {
718   PetscErrorCode ierr;
719   Mat_IS         *is = (Mat_IS*)A->data;
720 
721   PetscFunctionBegin;
722   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
723   PetscFunctionReturn(0);
724 }
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "MatZeroRows_IS"
/* Zeros the requested (globally numbered) rows of a MATIS matrix.

   Since the matrix is stored unassembled, a row zeroed by its owner must
   also be zeroed inside every subdomain matrix that shares it.  The row
   selection is therefore broadcast from owners (SF roots, one per locally
   owned global row) to the subdomains (SF leaves, one per local-matrix
   row), and the actual zeroing is delegated to
   MatISZeroRowsLocal_Private(). */
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed: set b = diag*x on the zeroed rows,
     so that the modified system keeps the prescribed solution values */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  /* nl is the row count of the local subdomain matrix (== sf_nleaves);
     sf_rootdata is sized by the number of locally owned rows (A->rmap->n) */
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark owned rows to be zeroed, then broadcast the marks to every
     subdomain copy of those rows */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the marked leaves into a list of local-matrix row indices */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
766 
767 #undef __FUNCT__
768 #define __FUNCT__ "MatISZeroRowsLocal_Private"
/* Zeros rows of the LOCAL subdomain matrix (rows given in local numbering).

   When diag is nonzero, a zeroed row shared by several subdomains would
   contribute diag once per subdomain in the implicit (unassembled) matrix
   vector product.  To compensate, each local diagonal entry is set to
   diag divided by the multiplicity of that row, computed below through a
   "counting vector" scatter (reverse-add then forward-insert through the
   row scatter context is->rctx leaves the multiplicity in is->y). */
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (diag != 0.) {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    /* reverse-add accumulates one contribution per sharing subdomain ... */
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* ... and the forward-insert pushes the multiplicities back to is->y */
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* no rows to zero on this process: flag the subdomain problem as pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        /* split diag across the sharing subdomains (array holds the multiplicity) */
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
810 
811 #undef __FUNCT__
812 #define __FUNCT__ "MatAssemblyBegin_IS"
813 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
814 {
815   Mat_IS         *is = (Mat_IS*)A->data;
816   PetscErrorCode ierr;
817 
818   PetscFunctionBegin;
819   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "MatAssemblyEnd_IS"
825 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
826 {
827   Mat_IS         *is = (Mat_IS*)A->data;
828   PetscErrorCode ierr;
829 
830   PetscFunctionBegin;
831   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
832   PetscFunctionReturn(0);
833 }
834 
835 #undef __FUNCT__
836 #define __FUNCT__ "MatISGetLocalMat_IS"
837 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
838 {
839   Mat_IS *is = (Mat_IS*)mat->data;
840 
841   PetscFunctionBegin;
842   *local = is->A;
843   PetscFunctionReturn(0);
844 }
845 
846 #undef __FUNCT__
847 #define __FUNCT__ "MatISGetLocalMat"
848 /*@
849     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
850 
851   Input Parameter:
852 .  mat - the matrix
853 
854   Output Parameter:
855 .  local - the local matrix
856 
857   Level: advanced
858 
859   Notes:
860     This can be called if you have precomputed the nonzero structure of the
861   matrix and want to provide it to the inner matrix object to improve the performance
862   of the MatSetValues() operation.
863 
864 .seealso: MATIS
865 @*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the type-specific implementation (errors if mat is not MATIS) */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
876 
877 #undef __FUNCT__
878 #define __FUNCT__ "MatISSetLocalMat_IS"
879 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
880 {
881   Mat_IS         *is = (Mat_IS*)mat->data;
882   PetscInt       nrows,ncols,orows,ocols;
883   PetscErrorCode ierr;
884 
885   PetscFunctionBegin;
886   if (is->A) {
887     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
888     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
889     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
890   }
891   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
892   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
893   is->A = local;
894   PetscFunctionReturn(0);
895 }
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "MatISSetLocalMat"
899 /*@
900     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
901 
902   Input Parameter:
903 .  mat - the matrix
904 .  local - the local matrix
905 
906   Output Parameter:
907 
908   Level: advanced
909 
910   Notes:
911     This can be called if you have precomputed the local matrix and
912   want to provide it to the matrix object MATIS.
913 
914 .seealso: MATIS
915 @*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the type-specific implementation (errors if mat is not MATIS) */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
926 
927 #undef __FUNCT__
928 #define __FUNCT__ "MatZeroEntries_IS"
929 PetscErrorCode MatZeroEntries_IS(Mat A)
930 {
931   Mat_IS         *a = (Mat_IS*)A->data;
932   PetscErrorCode ierr;
933 
934   PetscFunctionBegin;
935   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
936   PetscFunctionReturn(0);
937 }
938 
939 #undef __FUNCT__
940 #define __FUNCT__ "MatScale_IS"
941 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
942 {
943   Mat_IS         *is = (Mat_IS*)A->data;
944   PetscErrorCode ierr;
945 
946   PetscFunctionBegin;
947   ierr = MatScale(is->A,a);CHKERRQ(ierr);
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "MatGetDiagonal_IS"
953 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
954 {
955   Mat_IS         *is = (Mat_IS*)A->data;
956   PetscErrorCode ierr;
957 
958   PetscFunctionBegin;
959   /* get diagonal of the local matrix */
960   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
961 
962   /* scatter diagonal back into global vector */
963   ierr = VecSet(v,0);CHKERRQ(ierr);
964   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
965   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "MatSetOption_IS"
971 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
972 {
973   Mat_IS         *a = (Mat_IS*)A->data;
974   PetscErrorCode ierr;
975 
976   PetscFunctionBegin;
977   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
978   PetscFunctionReturn(0);
979 }
980 
981 #undef __FUNCT__
982 #define __FUNCT__ "MatCreateIS"
983 /*@
984     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
985        process but not across processes.
986 
987    Input Parameters:
988 +     comm    - MPI communicator that will share the matrix
989 .     bs      - block size of the matrix
990 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
991 .     rmap    - local to global map for rows
992 -     cmap    - local to global map for cols
993 
994    Output Parameter:
995 .    A - the resulting matrix
996 
997    Level: advanced
998 
999    Notes: See MATIS for more details.
1000           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1001           by that process. The sizes of rmap and cmap define the size of the local matrices.
1002           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1003 
1004 .seealso: MATIS, MatSetLocalToGlobalMapping()
1005 @*/
1006 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1007 {
1008   PetscErrorCode ierr;
1009 
1010   PetscFunctionBegin;
1011   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1012   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1013   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1014   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1015   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1016   if (rmap && cmap) {
1017     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1018   } else if (!rmap) {
1019     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1020   } else {
1021     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1022   }
1023   PetscFunctionReturn(0);
1024 }
1025 
1026 /*MC
1027    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1028    This stores the matrices in globally unassembled form. Each processor
1029    assembles only its local Neumann problem and the parallel matrix vector
1030    product is handled "implicitly".
1031 
1032    Operations Provided:
1033 +  MatMult()
1034 .  MatMultAdd()
1035 .  MatMultTranspose()
1036 .  MatMultTransposeAdd()
1037 .  MatZeroEntries()
1038 .  MatSetOption()
1039 .  MatZeroRows()
1040 .  MatSetValues()
1041 .  MatSetValuesLocal()
1042 .  MatScale()
1043 .  MatGetDiagonal()
1044 -  MatSetLocalToGlobalMapping()
1045 
1046    Options Database Keys:
1047 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1048 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1050 
1051           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1052 
1053           You can do matrix preallocation on the local matrix after you obtain it with
1054           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1055 
1056   Level: advanced
1057 
1058 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC, MatCreateIS()
1059 
1060 M*/
1061 
1062 #undef __FUNCT__
1063 #define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS type: allocates the type-specific data,
   installs the operation table, and composes the MATIS-specific methods
   that the public MatIS*() wrappers dispatch to. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table so unimplemented ops error cleanly */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions (reached through PetscUseMethod/PetscTryMethod) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1104