xref: /petsc/src/mat/impls/is/matis.c (revision e176bc59e91db6bb040a0187ebf8c00fa6f50623)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forest (SF) connecting the globally owned rows of B (roots)
   with the rows of the local, unassembled matrix (leaves), and allocates the
   sf_rootdata/sf_leafdata integer work arrays sized accordingly.
   Assumes B->rmap->mapping has already been set.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all rows of the local matrix; roots: locally owned global rows */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global row map provides the global index of each leaf */
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* work arrays reused by the preallocation routines below */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation if composed (no-op otherwise) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   Implementation of MatISSetPreallocation() for MATIS.
   Sums the diagonal and off-diagonal nonzero estimates on the owned (root)
   rows, broadcasts the totals to the local (leaf) rows through the SF, and
   uses them to preallocate the local matrix in AIJ, BAIJ and SBAIJ flavors
   (only the call matching the actual type of matis->A takes effect).
*/
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total estimated nonzeros (diag + offdiag) of owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the SF broadcast with local queries on the local matrix */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row cannot have more nonzeros than the number of local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress scalar-row estimates into block-row estimates for BAIJ
     (takes the first row of each block; assumes rows of a block are similar
      -- NOTE(review): heuristic, confirm against the BAIJ requirements) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract the block-row index as a
     rough correction to the per-block-row count (overwrites in place the
     BAIJ estimates computed just above) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   Computes and sets the preallocation of the assembled parallel matrix B from
   the unassembled MATIS matrix A.  Each process counts, for every row of its
   local matrix, how many entries fall in the diagonal/off-diagonal block of
   the row's owner; the per-process counts are then combined on the owners
   through the SF, with MPI_MAX (if maxreduce) or MPI_SUM otherwise.
   Preallocation is generally an overestimate, never an underestimate.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt* global_indices;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only; MatGetRow needs this call first */
  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared interface dofs.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  /* dnz/onz are allocated (zeroed) by this macro and freed by MatPreallocateFinalize */
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j]=i;
    }
  }

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* a dense local matrix contributes to every (i,j) pair; only the upper
       triangle j>=i is visited, with the (j,i) mirror counted explicitly */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else {
    /* sparse local matrix: walk the stored entries row by row */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols
           (SBAIJ stores the upper triangle only, so mirror each entry) */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);
  /* combine the per-process (leaf) counts on the owning (root) rows */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation (only the call matching B's type takes effect) */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress scalar-row counts into block-row counts for (S)BAIJ */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
262 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   Implementation of MatISGetMPIXAIJ(): assembles the unassembled MATIS mat
   into a global (MPI) matrix M by adding each process' local matrix entries
   with ADD_VALUES.  On one process the local matrix is simply duplicated or
   copied.  With MAT_INITIAL_MATRIX the target type is SBAIJ if every local
   matrix is SBAIJ, otherwise BAIJ (bs>1) or AIJ.
*/
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  const PetscInt *global_indices_rows;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
    - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* uniprocessor: the local matrix already is the assembled matrix */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  /* map indices of local mat rows to global values */
  ierr = ISLocalToGlobalMappingGetIndices(mat->rmap->mapping,&global_indices_rows);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat's layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    /* values are ADDed below, so the reused matrix must start from zero */
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ stores the upper triangle only; convert to BAIJ to get full rows */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  if (isdense) { /* special case for dense local matrices */
    /* dense arrays are column oriented, hence MAT_ROW_ORIENTED = PETSC_FALSE */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else if (isseqaij) {
    /* fast path: access the CSR structure directly, one MatSetValues per row */
    PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    /* map all column indices to global numbering in one call */
    ierr = PetscMalloc1(xadj[nvtxs],&global_indices_cols);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,xadj[nvtxs],adjncy,global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      PetscInt    row,*cols,ncols;
      PetscScalar *mat_vals;

      row = global_indices_rows[i];
      ncols = xadj[i+1]-xadj[i];
      cols = global_indices_cols+xadj[i];
      mat_vals = array+xadj[i];
      ierr = MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i,*global_indices_cols;

    ierr = PetscMalloc1(local_cols,&global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices;

      ierr = MatGetRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,j,local_indices,global_indices_cols);CHKERRQ(ierr);
      ierr = MatSetValues(*M,1,&global_indices_rows[i],j,global_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  }
  /* overlap assembly communication with local cleanup */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(mat->rmap->mapping,&global_indices_rows);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* restore the default row orientation changed above */
  if (isdense) {
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
414 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* when reusing, the output must be a distinct, compatible matrix */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) {
      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
    }
  }
  /* errors out (unlike PetscTryMethod) if mat is not a MATIS */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
451 
452 #undef __FUNCT__
453 #define __FUNCT__ "MatDuplicate_IS"
454 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
455 {
456   PetscErrorCode ierr;
457   Mat_IS         *matis = (Mat_IS*)(mat->data);
458   PetscInt       bs,m,n,M,N;
459   Mat            B,localmat;
460 
461   PetscFunctionBegin;
462   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
463   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
464   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
465   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
466   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
467   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
468   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
469   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
470   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
471   *newmat = B;
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "MatIsHermitian_IS"
477 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
478 {
479   PetscErrorCode ierr;
480   Mat_IS         *matis = (Mat_IS*)A->data;
481   PetscBool      local_sym;
482 
483   PetscFunctionBegin;
484   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
485   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
486   PetscFunctionReturn(0);
487 }
488 
489 #undef __FUNCT__
490 #define __FUNCT__ "MatIsSymmetric_IS"
491 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
492 {
493   PetscErrorCode ierr;
494   Mat_IS         *matis = (Mat_IS*)A->data;
495   PetscBool      local_sym;
496 
497   PetscFunctionBegin;
498   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
499   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
500   PetscFunctionReturn(0);
501 }
502 
503 #undef __FUNCT__
504 #define __FUNCT__ "MatDestroy_IS"
505 PetscErrorCode MatDestroy_IS(Mat A)
506 {
507   PetscErrorCode ierr;
508   Mat_IS         *b = (Mat_IS*)A->data;
509 
510   PetscFunctionBegin;
511   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
512   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
513   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
514   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
515   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
516   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
517   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
518   ierr = PetscFree(A->data);CHKERRQ(ierr);
519   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
520   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
521   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
522   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
523   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
524   PetscFunctionReturn(0);
525 }
526 
527 #undef __FUNCT__
528 #define __FUNCT__ "MatMult_IS"
529 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
530 {
531   PetscErrorCode ierr;
532   Mat_IS         *is  = (Mat_IS*)A->data;
533   PetscScalar    zero = 0.0;
534 
535   PetscFunctionBegin;
536   /*  scatter the global vector x into the local work vector */
537   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
538   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
539 
540   /* multiply the local matrix */
541   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
542 
543   /* scatter product back into global memory */
544   ierr = VecSet(y,zero);CHKERRQ(ierr);
545   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
546   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
547   PetscFunctionReturn(0);
548 }
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "MatMultAdd_IS"
552 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
553 {
554   Vec            temp_vec;
555   PetscErrorCode ierr;
556 
557   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
558   if (v3 != v2) {
559     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
560     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
561   } else {
562     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
563     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
564     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
565     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
566     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
567   }
568   PetscFunctionReturn(0);
569 }
570 
571 #undef __FUNCT__
572 #define __FUNCT__ "MatMultTranspose_IS"
573 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
574 {
575   Mat_IS         *is = (Mat_IS*)A->data;
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   /*  scatter the global vector x into the local work vector */
580   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
581   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
582 
583   /* multiply the local matrix */
584   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
585 
586   /* scatter product back into global vector */
587   ierr = VecSet(x,0);CHKERRQ(ierr);
588   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
589   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "MatMultTransposeAdd_IS"
595 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
596 {
597   Vec            temp_vec;
598   PetscErrorCode ierr;
599 
600   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
601   if (v3 != v2) {
602     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
603     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
604   } else {
605     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
606     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
607     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
608     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
609     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
610   }
611   PetscFunctionReturn(0);
612 }
613 
614 #undef __FUNCT__
615 #define __FUNCT__ "MatView_IS"
616 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
617 {
618   Mat_IS         *a = (Mat_IS*)A->data;
619   PetscErrorCode ierr;
620   PetscViewer    sviewer;
621 
622   PetscFunctionBegin;
623   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
624   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
625   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
626   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
627   PetscFunctionReturn(0);
628 }
629 
630 #undef __FUNCT__
631 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
632 PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
633 {
634   PetscErrorCode ierr;
635   PetscInt       nr,rbs,nc,cbs;
636   Mat_IS         *is = (Mat_IS*)A->data;
637   IS             from,to;
638   Vec            cglobal,rglobal;
639 
640   PetscFunctionBegin;
641   PetscCheckSameComm(A,1,rmapping,2);
642   PetscCheckSameComm(A,1,cmapping,3);
643   /* Destroy any previous data */
644   ierr = VecDestroy(&is->x);CHKERRQ(ierr);
645   ierr = VecDestroy(&is->y);CHKERRQ(ierr);
646   ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
647   ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
648   ierr = MatDestroy(&is->A);CHKERRQ(ierr);
649   ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
650   ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
651 
652   /* Setup Layout and set local to global maps */
653   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
654   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
655   ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
656   ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
657 
658   /* Create the local matrix A */
659   ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
660   ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
661   ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
662   ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
663   ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
664   ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
665   ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
666   ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
667   ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
668   ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
669   ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);
670 
671   /* Create the local work vectors */
672   //ierr = MatSetUp(is->A);CHKERRQ(ierr);
673   ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);
674 
675   /* setup the global to local scatters */
676   ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
677   ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
678   ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
679   ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
680   if (rmapping != cmapping) {
681     ierr = ISDestroy(&to);CHKERRQ(ierr);
682     ierr = ISDestroy(&from);CHKERRQ(ierr);
683     ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
684     ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
685     ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
686   } else {
687     ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
688     is->cctx = is->rctx;
689   }
690   ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
691   ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
692   ierr = ISDestroy(&to);CHKERRQ(ierr);
693   ierr = ISDestroy(&from);CHKERRQ(ierr);
694   PetscFunctionReturn(0);
695 }
696 
697 #undef __FUNCT__
698 #define __FUNCT__ "MatSetValues_IS"
699 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
700 {
701   Mat_IS         *is = (Mat_IS*)mat->data;
702   PetscInt       rows_l[2048],cols_l[2048];
703   PetscErrorCode ierr;
704 
705   PetscFunctionBegin;
706 #if defined(PETSC_USE_DEBUG)
707   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
708 #endif
709   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
710   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
711   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
712   PetscFunctionReturn(0);
713 }
714 
715 #undef __FUNCT__
716 #define __FUNCT__ "MatSetValuesLocal_IS"
717 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
718 {
719   PetscErrorCode ierr;
720   Mat_IS         *is = (Mat_IS*)A->data;
721 
722   PetscFunctionBegin;
723   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
724   PetscFunctionReturn(0);
725 }
726 
727 #undef __FUNCT__
728 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
729 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
730 {
731   PetscErrorCode ierr;
732   Mat_IS         *is = (Mat_IS*)A->data;
733 
734   PetscFunctionBegin;
735   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
736   PetscFunctionReturn(0);
737 }
738 
739 #undef __FUNCT__
740 #define __FUNCT__ "MatZeroRows_IS"
741 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
742 {
743   PetscInt       n_l = 0, *rows_l = NULL;
744   PetscErrorCode ierr;
745 
746   PetscFunctionBegin;
747   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
748   if (n) {
749     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
750     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
751   }
752   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
753   ierr = PetscFree(rows_l);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
757 #undef __FUNCT__
758 #define __FUNCT__ "MatZeroRowsLocal_IS"
759 PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
760 {
761   Mat_IS         *is = (Mat_IS*)A->data;
762   PetscErrorCode ierr;
763   PetscInt       i;
764   PetscScalar    *array;
765 
766   PetscFunctionBegin;
767   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
768   {
769     /*
770        Set up is->x as a "counting vector". This is in order to MatMult_IS
771        work properly in the interface nodes.
772     */
773     Vec counter;
774     ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
775     ierr = VecSet(counter,0.);CHKERRQ(ierr);
776     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
777     ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
778     ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
779     ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
780     ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
781     ierr = VecDestroy(&counter);CHKERRQ(ierr);
782   }
783   if (!n) {
784     is->pure_neumann = PETSC_TRUE;
785   } else {
786     is->pure_neumann = PETSC_FALSE;
787 
788     ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
789     ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
790     for (i=0; i<n; i++) {
791       ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
792     }
793     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
794     ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
795     ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
796   }
797   PetscFunctionReturn(0);
798 }
799 
800 #undef __FUNCT__
801 #define __FUNCT__ "MatAssemblyBegin_IS"
802 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
803 {
804   Mat_IS         *is = (Mat_IS*)A->data;
805   PetscErrorCode ierr;
806 
807   PetscFunctionBegin;
808   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "MatAssemblyEnd_IS"
814 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
815 {
816   Mat_IS         *is = (Mat_IS*)A->data;
817   PetscErrorCode ierr;
818 
819   PetscFunctionBegin;
820   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "MatISGetLocalMat_IS"
826 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
827 {
828   Mat_IS *is = (Mat_IS*)mat->data;
829 
830   PetscFunctionBegin;
831   *local = is->A;
832   PetscFunctionReturn(0);
833 }
834 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The implementation (MatISGetLocalMat_IS) does not take a new reference on
  the returned matrix, so do not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the implementation composed on the object ("MatISGetLocalMat_C") */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
865 
866 #undef __FUNCT__
867 #define __FUNCT__ "MatISSetLocalMat_IS"
868 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
869 {
870   Mat_IS         *is = (Mat_IS*)mat->data;
871   PetscInt       nrows,ncols,orows,ocols;
872   PetscErrorCode ierr;
873 
874   PetscFunctionBegin;
875   if (is->A) {
876     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
877     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
878     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
879   }
880   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
881   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
882   is->A = local;
883   PetscFunctionReturn(0);
884 }
885 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The implementation (MatISSetLocalMat_IS) takes its own reference on
  local, so the caller retains ownership of its reference.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the implementation composed on the object ("MatISSetLocalMat_C") */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
915 
916 #undef __FUNCT__
917 #define __FUNCT__ "MatZeroEntries_IS"
918 PetscErrorCode MatZeroEntries_IS(Mat A)
919 {
920   Mat_IS         *a = (Mat_IS*)A->data;
921   PetscErrorCode ierr;
922 
923   PetscFunctionBegin;
924   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "MatScale_IS"
930 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
931 {
932   Mat_IS         *is = (Mat_IS*)A->data;
933   PetscErrorCode ierr;
934 
935   PetscFunctionBegin;
936   ierr = MatScale(is->A,a);CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 #undef __FUNCT__
941 #define __FUNCT__ "MatGetDiagonal_IS"
942 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
943 {
944   Mat_IS         *is = (Mat_IS*)A->data;
945   PetscErrorCode ierr;
946 
947   PetscFunctionBegin;
948   /* get diagonal of the local matrix */
949   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
950 
951   /* scatter diagonal back into global vector */
952   ierr = VecSet(v,0);CHKERRQ(ierr);
953   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
954   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 #undef __FUNCT__
959 #define __FUNCT__ "MatSetOption_IS"
960 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
961 {
962   Mat_IS         *a = (Mat_IS*)A->data;
963   PetscErrorCode ierr;
964 
965   PetscFunctionBegin;
966   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
969 
970 #undef __FUNCT__
971 #define __FUNCT__ "MatCreateIS"
972 /*@
973     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
974        process but not across processes.
975 
976    Input Parameters:
977 +     comm    - MPI communicator that will share the matrix
978 .     bs      - block size of the matrix
979 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
980 .     rmap    - local to global map for rows
981 -     cmap    - local to global map for cols
982 
983    Output Parameter:
984 .    A - the resulting matrix
985 
986    Level: advanced
987 
988    Notes: See MATIS for more details
989           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
990           by that process. The sizes of rmap and cmap define the size of the local matrices.
991           If either rmap or cmap are NULL, than the matrix is assumed to be square
992 
993 .seealso: MATIS, MatSetLocalToGlobalMapping()
994 @*/
995 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
996 {
997   PetscErrorCode ierr;
998 
999   PetscFunctionBegin;
1000   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1001   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1002   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1003   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1004   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1005   ierr = MatSetUp(*A);CHKERRQ(ierr);
1006   if (rmap && cmap) {
1007     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1008   } else if (!rmap) {
1009     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1010   } else {
1011     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1012   }
1013   PetscFunctionReturn(0);
1014 }
1015 
1016 /*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition preconditioners (e.g. PCBDDC).
1018    This stores the matrices in globally unassembled form. Each processor
1019    assembles only its local Neumann problem and the parallel matrix vector
1020    product is handled "implicitly".
1021 
1022    Operations Provided:
1023 +  MatMult()
1024 .  MatMultAdd()
1025 .  MatMultTranspose()
1026 .  MatMultTransposeAdd()
1027 .  MatZeroEntries()
1028 .  MatSetOption()
1029 .  MatZeroRows()
1030 .  MatZeroRowsLocal()
1031 .  MatSetValues()
1032 .  MatSetValuesLocal()
1033 .  MatScale()
1034 .  MatGetDiagonal()
1035 -  MatSetLocalToGlobalMapping()
1036 
1037    Options Database Keys:
1038 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1039 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1041 
1042           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1043 
1044           You can do matrix preallocation on the local matrix after you obtain it with
1045           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1046 
1047   Level: advanced
1048 
1049 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1050 
1051 M*/
1052 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor for the MATIS matrix type: allocates the
   Mat_IS context, installs the MatOps function table and composes the
   type-specific methods that MatISGetLocalMat()/MatISSetLocalMat() etc.
   dispatch to via PetscUseMethod().
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from an all-zero table so unsupported operations
     fail cleanly, then fill in what MATIS implements */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions, reached through PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1096