xref: /petsc/src/mat/impls/is/matis.c (revision c1b087ad4e8127e655b053aef64dda36d5a05659)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the star forest (SF) connecting the rows of the
   local (subdomain) matrix (leaves) to the locally owned rows of the global
   matrix (roots), and allocates the rootdata/leafdata work arrays reused by the
   preallocation routines in this file.

   Input Parameter:
.  B - the MATIS matrix (matis->mapping must already be set)
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* one leaf per row of the local subdomain matrix */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  /* one root per locally owned row of the global matrix */
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global map gives, for each leaf, the global row it refers to */
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* work buffers for SF broadcasts/reduces; freed in MatDestroy_IS */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation if one is composed on B */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MatISSetPreallocation_IS - MATIS implementation of MatISSetPreallocation().

   Broadcasts per-row nonzero counts (diagonal + off-diagonal summed together)
   from the owners of the global rows to the local subdomain rows via the SF,
   then preallocates the local matrix for the SEQAIJ, SEQBAIJ and SEQSBAIJ
   cases (only the call matching the actual type of matis->A has an effect).
*/
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total (diag + offdiag) nonzeros requested for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with local queries on matis->A */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* convert scalar row counts to block-row counts, in place: entry i*bs is read
     before entry i is overwritten since i <= i*bs */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): for SBAIJ, subtracting the block-row index i looks like a
     heuristic discount for the strictly lower-triangular blocks not stored by
     the symmetric format — confirm; counts are not clamped at zero here */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   MatISSetMPIXAIJPreallocation_Private - Computes and sets the preallocation of
   the assembled parallel matrix B from the unassembled MATIS matrix A.

   Input Parameters:
+  A         - the MATIS matrix
.  B         - the assembled (MPIAIJ/MPIBAIJ/MPISBAIJ) matrix to preallocate
-  maxreduce - if PETSC_TRUE, combine per-subdomain counts on shared dofs with
               MPI_MAX instead of MPI_SUM (less memory, possibly insufficient
               for exact assembly; SUM always suffices)
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt* global_indices;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle; MatGetRow needs this call first */
  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared interface dofs.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  /* dnz/onz are allocated (and zeroed) by this macro; released by MatPreallocateFinalize */
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j]=i;  /* global row j is owned by process i */
    }
  }

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* every (i,j) pair with j >= i contributes; symmetry handles j < i */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else {
    /* sparse local matrix: walk the stored rows */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols: only needed for SBAIJ,
           whose stored upper triangle implies the symmetric entry */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);
  /* combine per-subdomain counts onto the owning processes */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation: only the call matching B's type takes effect */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* convert scalar counts to block counts in place (i <= i*bs, so reads precede writes) */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
262 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MatISGetMPIXAIJ_IS - MATIS implementation of MatISGetMPIXAIJ(): assembles the
   unassembled MATIS matrix into a parallel AIJ/BAIJ/SBAIJ matrix by summing
   (ADD_VALUES) the subdomain contributions.

   On a single process it simply duplicates/copies the local matrix. Otherwise,
   with MAT_INITIAL_MATRIX the target matrix is created and preallocated; with
   MAT_REUSE_MATRIX the existing *M is checked for compatible sizes and zeroed.
*/
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  const PetscInt *global_indices_rows;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
    - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* sequential case: the local matrix is already the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  /* map indices of local mat rows to global values */
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every subdomain matrix is SBAIJ */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat in all sizes and block size */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ local matrices are converted to BAIJ so the full rows are available */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  if (isdense) { /* special case for dense local matrices */
    /* the dense array is column-major, hence MAT_ROW_ORIENTED is turned off;
       row and column maps coincide (square MATIS with a single mapping) */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else if (isseqaij) {
    /* fast path: use the raw CSR arrays of the local matrix */
    PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    /* translate all column indices to global numbering in one shot */
    ierr = PetscMalloc1(xadj[nvtxs],&global_indices_cols);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(matis->mapping,xadj[nvtxs],adjncy,global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      PetscInt    row,*cols,ncols;
      PetscScalar *mat_vals;

      row = global_indices_rows[i];
      ncols = xadj[i+1]-xadj[i];
      cols = global_indices_cols+xadj[i];
      mat_vals = array+xadj[i];
      ierr = MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i,*global_indices_cols;

    ierr = PetscMalloc1(local_cols,&global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices;

      ierr = MatGetRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices_cols);CHKERRQ(ierr);
      ierr = MatSetValues(*M,1,&global_indices_rows[i],j,global_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  }
  /* overlap assembly communication with local cleanup */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
414 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: When reuse is MAT_REUSE_MATRIX, *newmat must be a valid matrix living
         on the same communicator as mat.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  /* dispatch to the implementation composed on mat (errors if absent) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
448 
449 #undef __FUNCT__
450 #define __FUNCT__ "MatDuplicate_IS"
451 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
452 {
453   PetscErrorCode ierr;
454   Mat_IS         *matis = (Mat_IS*)(mat->data);
455   PetscInt       bs,m,n,M,N;
456   Mat            B,localmat;
457 
458   PetscFunctionBegin;
459   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
460   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
461   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
462   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
463   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
464   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
465   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
466   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
467   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
468   *newmat = B;
469   PetscFunctionReturn(0);
470 }
471 
472 #undef __FUNCT__
473 #define __FUNCT__ "MatIsHermitian_IS"
474 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
475 {
476   PetscErrorCode ierr;
477   Mat_IS         *matis = (Mat_IS*)A->data;
478   PetscBool      local_sym;
479 
480   PetscFunctionBegin;
481   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
482   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "MatIsSymmetric_IS"
488 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
489 {
490   PetscErrorCode ierr;
491   Mat_IS         *matis = (Mat_IS*)A->data;
492   PetscBool      local_sym;
493 
494   PetscFunctionBegin;
495   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
496   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
497   PetscFunctionReturn(0);
498 }
499 
500 #undef __FUNCT__
501 #define __FUNCT__ "MatDestroy_IS"
502 PetscErrorCode MatDestroy_IS(Mat A)
503 {
504   PetscErrorCode ierr;
505   Mat_IS         *b = (Mat_IS*)A->data;
506 
507   PetscFunctionBegin;
508   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
509   ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
510   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
511   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
512   ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
513   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
514   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
515   ierr = PetscFree(A->data);CHKERRQ(ierr);
516   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
517   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
518   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
519   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
520   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "MatMult_IS"
526 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
527 {
528   PetscErrorCode ierr;
529   Mat_IS         *is  = (Mat_IS*)A->data;
530   PetscScalar    zero = 0.0;
531 
532   PetscFunctionBegin;
533   /*  scatter the global vector x into the local work vector */
534   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
535   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
536 
537   /* multiply the local matrix */
538   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
539 
540   /* scatter product back into global memory */
541   ierr = VecSet(y,zero);CHKERRQ(ierr);
542   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
543   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
544   PetscFunctionReturn(0);
545 }
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "MatMultAdd_IS"
549 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
550 {
551   Vec            temp_vec;
552   PetscErrorCode ierr;
553 
554   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
555   if (v3 != v2) {
556     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
557     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
558   } else {
559     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
560     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
561     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
562     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
563     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
564   }
565   PetscFunctionReturn(0);
566 }
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "MatMultTranspose_IS"
570 PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
571 {
572   Mat_IS         *is = (Mat_IS*)A->data;
573   PetscErrorCode ierr;
574 
575   PetscFunctionBegin; /*  y = A' * x */
576   /*  scatter the global vector x into the local work vector */
577   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
578   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
579 
580   /* multiply the local matrix */
581   ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);
582 
583   /* scatter product back into global vector */
584   ierr = VecSet(y,0);CHKERRQ(ierr);
585   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
586   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 #undef __FUNCT__
591 #define __FUNCT__ "MatMultTransposeAdd_IS"
592 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
593 {
594   Vec            temp_vec;
595   PetscErrorCode ierr;
596 
597   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
598   if (v3 != v2) {
599     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
600     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
601   } else {
602     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
603     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
604     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
605     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
606     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
607   }
608   PetscFunctionReturn(0);
609 }
610 
611 #undef __FUNCT__
612 #define __FUNCT__ "MatView_IS"
613 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
614 {
615   Mat_IS         *a = (Mat_IS*)A->data;
616   PetscErrorCode ierr;
617   PetscViewer    sviewer;
618 
619   PetscFunctionBegin;
620   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
621   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
622   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
623   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
624   PetscFunctionReturn(0);
625 }
626 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Stores the (single, square) local-to-global
   mapping and (re)builds everything derived from it: the local matrix, the
   local work vectors and the global-to-local scatter.

   MATIS requires identical row and column mappings; a second call discards
   all previously derived objects before rebuilding them.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currenly destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  /* take a reference before destroying: handles the self-assignment case */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A: sequential, square of size n (the local size of
     the mapping), BAIJ when the mapping has a block size > 1, AIJ otherwise */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  /* local objects are configurable via the "is_" options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter: local index i <- global index mapping(i) */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
691 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/*
   MatSetValues_IS - Inserts values given in GLOBAL numbering by translating
   row and column indices to local numbering (via the ISG2LMapApply macro,
   defined earlier in this file and #undef'd right after this function —
   presumably a cached wrapper around ISGlobalToLocalMappingApply; confirm)
   and forwarding to the local subdomain matrix.

   Uses fixed stack buffers of 2048 indices; in debug builds larger requests
   are rejected explicitly (in optimized builds they would overflow).
*/
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
709 
710 #undef ISG2LMapSetUp
711 #undef ISG2LMapApply
712 
713 #undef __FUNCT__
714 #define __FUNCT__ "MatSetValuesLocal_IS"
715 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
716 {
717   PetscErrorCode ierr;
718   Mat_IS         *is = (Mat_IS*)A->data;
719 
720   PetscFunctionBegin;
721   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
722   PetscFunctionReturn(0);
723 }
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
727 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
728 {
729   PetscErrorCode ierr;
730   Mat_IS         *is = (Mat_IS*)A->data;
731 
732   PetscFunctionBegin;
733   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
734   PetscFunctionReturn(0);
735 }
736 
737 #undef __FUNCT__
738 #define __FUNCT__ "MatZeroRows_IS"
739 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
740 {
741   Mat_IS         *is = (Mat_IS*)A->data;
742   PetscInt       n_l = 0, *rows_l = NULL;
743   PetscErrorCode ierr;
744 
745   PetscFunctionBegin;
746   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
747   if (n) {
748     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
749     ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
750   }
751   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
752   ierr = PetscFree(rows_l);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeroes locally-numbered rows of the subdomain matrix
   and sets the diagonal.  Because the MATIS global operator is assembled
   implicitly (subdomain contributions are summed in MatMult), the diagonal
   entry inserted locally is diag divided by the multiplicity of the dof, so
   that the summed global diagonal equals diag.  Modifying x/b is not
   supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    /* reverse ADD scatter: each global entry accumulates a 1 from every
       subdomain sharing it, i.e. counter = multiplicity of each dof */
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* forward INSERT scatter: broadcast the multiplicities back so that
       is->x[i] holds the multiplicity of local dof i */
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    /* zero the rows (this also sets the local diagonal to diag, which is
       overwritten below with the multiplicity-scaled value) */
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* divide by the multiplicity so the implicitly-summed global diagonal
         comes out to exactly diag */
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
799 
800 #undef __FUNCT__
801 #define __FUNCT__ "MatAssemblyBegin_IS"
802 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
803 {
804   Mat_IS         *is = (Mat_IS*)A->data;
805   PetscErrorCode ierr;
806 
807   PetscFunctionBegin;
808   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "MatAssemblyEnd_IS"
814 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
815 {
816   Mat_IS         *is = (Mat_IS*)A->data;
817   PetscErrorCode ierr;
818 
819   PetscFunctionBegin;
820   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "MatISGetLocalMat_IS"
826 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
827 {
828   Mat_IS *is = (Mat_IS*)mat->data;
829 
830   PetscFunctionBegin;
831   *local = is->A;
832   PetscFunctionReturn(0);
833 }
834 
835 #undef __FUNCT__
836 #define __FUNCT__ "MatISGetLocalMat"
837 /*@
838     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
839 
840   Input Parameter:
841 .  mat - the matrix
842 
843   Output Parameter:
844 .  local - the local matrix usually MATSEQAIJ
845 
846   Level: advanced
847 
848   Notes:
849     This can be called if you have precomputed the nonzero structure of the
850   matrix and want to provide it to the inner matrix object to improve the performance
851   of the MatSetValues() operation.
852 
853 .seealso: MATIS
854 @*/
855 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
856 {
857   PetscErrorCode ierr;
858 
859   PetscFunctionBegin;
860   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
861   PetscValidPointer(local,2);
862   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
865 
866 #undef __FUNCT__
867 #define __FUNCT__ "MatISSetLocalMat_IS"
868 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
869 {
870   Mat_IS         *is = (Mat_IS*)mat->data;
871   PetscInt       nrows,ncols,orows,ocols;
872   PetscErrorCode ierr;
873 
874   PetscFunctionBegin;
875   if (is->A) {
876     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
877     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
878     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
879   }
880   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
881   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
882   is->A = local;
883   PetscFunctionReturn(0);
884 }
885 
886 #undef __FUNCT__
887 #define __FUNCT__ "MatISSetLocalMat"
888 /*@
889     MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.
890 
891   Input Parameter:
892 .  mat - the matrix
893 .  local - the local matrix usually MATSEQAIJ
894 
895   Output Parameter:
896 
897   Level: advanced
898 
899   Notes:
900     This can be called if you have precomputed the local matrix and
901   want to provide it to the matrix object MATIS.
902 
903 .seealso: MATIS
904 @*/
905 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
906 {
907   PetscErrorCode ierr;
908 
909   PetscFunctionBegin;
910   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
911   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
912   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
913   PetscFunctionReturn(0);
914 }
915 
916 #undef __FUNCT__
917 #define __FUNCT__ "MatZeroEntries_IS"
918 PetscErrorCode MatZeroEntries_IS(Mat A)
919 {
920   Mat_IS         *a = (Mat_IS*)A->data;
921   PetscErrorCode ierr;
922 
923   PetscFunctionBegin;
924   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "MatScale_IS"
930 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
931 {
932   Mat_IS         *is = (Mat_IS*)A->data;
933   PetscErrorCode ierr;
934 
935   PetscFunctionBegin;
936   ierr = MatScale(is->A,a);CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 #undef __FUNCT__
941 #define __FUNCT__ "MatGetDiagonal_IS"
942 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
943 {
944   Mat_IS         *is = (Mat_IS*)A->data;
945   PetscErrorCode ierr;
946 
947   PetscFunctionBegin;
948   /* get diagonal of the local matrix */
949   ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);
950 
951   /* scatter diagonal back into global vector */
952   ierr = VecSet(v,0);CHKERRQ(ierr);
953   ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
954   ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 #undef __FUNCT__
959 #define __FUNCT__ "MatSetOption_IS"
960 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
961 {
962   Mat_IS         *a = (Mat_IS*)A->data;
963   PetscErrorCode ierr;
964 
965   PetscFunctionBegin;
966   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
969 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix: it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm - MPI communicator that will share the matrix
.     bs - local and global block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
-     map - mapping that defines the global number for each local number

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
          plus the ghost points to global indices.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* same map is used for rows and columns */
  ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1007 
1008 /*MC
1009    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
1010    This stores the matrices in globally unassembled form. Each processor
1011    assembles only its local Neumann problem and the parallel matrix vector
1012    product is handled "implicitly".
1013 
1014    Operations Provided:
1015 +  MatMult()
1016 .  MatMultAdd()
1017 .  MatMultTranspose()
1018 .  MatMultTransposeAdd()
1019 .  MatZeroEntries()
1020 .  MatSetOption()
1021 .  MatZeroRows()
1022 .  MatZeroRowsLocal()
1023 .  MatSetValues()
1024 .  MatSetValuesLocal()
1025 .  MatScale()
1026 .  MatGetDiagonal()
1027 -  MatSetLocalToGlobalMapping()
1028 
1029    Options Database Keys:
1030 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1031 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1033 
1034           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1035 
1036           You can do matrix preallocation on the local matrix after you obtain it with
1037           MatISGetLocalMat()
1038 
1039   Level: advanced
1040 
1041 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC
1042 
1043 M*/
1044 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor for the MATIS matrix type: allocates the
   Mat_IS context, installs the type-specific operation table, and composes
   the MATIS-specific query/set functions.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  /* wipe any operations inherited from a previous type */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);

  /* operation table: everything not set here stays unsupported (zeroed) */
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);

  /* explicitly zero the context pointers; the local matrix, scatter and work
     vectors are created later in MatSetLocalToGlobalMapping_IS
     (NOTE(review): PetscNewLog presumably already zeroes the struct —
     kept explicit for clarity) */
  b->A           = 0;
  b->ctx         = 0;
  b->x           = 0;
  b->y           = 0;
  b->mapping     = 0;
  b->sf          = 0;
  b->sf_rootdata = 0;
  b->sf_leafdata = 0;

  /* special MATIS functions, dispatched via PetscUseMethod by their public
     wrappers (the composed names must match the query strings exactly) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1100