xref: /petsc/src/mat/impls/is/matis.c (revision 686e3a490684ad76a6ce8320d41ecb571c0dc10d)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
static PetscErrorCode MatISComputeSF_Private(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the star forest (SF) connecting the owned
   global rows of B (roots) with the rows of the local unassembled matrix
   (leaves), and allocates the matching integer work arrays.

   Input Parameter:
.  B - the MATIS matrix; matis->mapping must have been set beforehand

   The SF and the rootdata/leafdata arrays are cached on the Mat_IS structure
   and reused by the preallocation routines; they are freed in MatDestroy_IS.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: every row of the local matrix; roots: locally owned global rows */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global map gives, for each leaf, its global (root) index */
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatches to the type-specific implementation; silently a no-op when B is not MATIS */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MatISSetPreallocation_IS - MATIS implementation of MatISSetPreallocation().

   The per-row d_nz/o_nz information supplied for the owned global rows is
   broadcast through the matrix SF to the rows of the local (unassembled)
   matrix, and used to preallocate it. AIJ, BAIJ and SBAIJ preallocations are
   all attempted; only the call matching the actual type of matis->A has an
   effect (the others are no-ops on that type).
*/
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total (diagonal + off-diagonal) estimate for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the SF broadcast with local size/blocksize queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot hold more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the point-wise counts into block-row counts for BAIJ
     (first point row of each block is taken as representative; overestimate) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract the block-row index as a
     rough correction; the result remains an overestimate */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
133 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
134 {
135   Mat_IS          *matis = (Mat_IS*)(A->data);
136   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
137   const PetscInt* global_indices;
138   PetscInt        i,j,bs,rows,cols;
139   PetscInt        lrows,lcols;
140   PetscInt        local_rows,local_cols;
141   PetscMPIInt     nsubdomains;
142   PetscBool       isdense,issbaij;
143   PetscErrorCode  ierr;
144 
145   PetscFunctionBegin;
146   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
147   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
150   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
151   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
152   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
153   if (issbaij) {
154     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
155   }
156   /*
157      An SF reduce is needed to sum up properly on shared interface dofs.
158      Note that generally preallocation is not exact, since it overestimates nonzeros
159   */
160   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
161     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
162   }
163   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
164   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
165   /* All processes need to compute entire row ownership */
166   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
167   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
168   for (i=0;i<nsubdomains;i++) {
169     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
170       row_ownership[j]=i;
171     }
172   }
173 
174   /*
175      my_dnz and my_onz contains exact contribution to preallocation from each local mat
176      then, they will be summed up properly. This way, preallocation is always sufficient
177   */
178   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
179   /* preallocation as a MATAIJ */
180   if (isdense) { /* special case for dense local matrices */
181     for (i=0;i<local_rows;i++) {
182       PetscInt index_row = global_indices[i];
183       for (j=i;j<local_rows;j++) {
184         PetscInt owner = row_ownership[index_row];
185         PetscInt index_col = global_indices[j];
186         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
187           my_dnz[i] += 1;
188         } else { /* offdiag block */
189           my_onz[i] += 1;
190         }
191         /* same as before, interchanging rows and cols */
192         if (i != j) {
193           owner = row_ownership[index_col];
194           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
195             my_dnz[j] += 1;
196           } else {
197             my_onz[j] += 1;
198           }
199         }
200       }
201     }
202   } else {
203     for (i=0;i<local_rows;i++) {
204       const PetscInt *cols;
205       PetscInt       ncols,index_row = global_indices[i];
206       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
207       for (j=0;j<ncols;j++) {
208         PetscInt owner = row_ownership[index_row];
209         PetscInt index_col = global_indices[cols[j]];
210         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
211           my_dnz[i] += 1;
212         } else { /* offdiag block */
213           my_onz[i] += 1;
214         }
215         /* same as before, interchanging rows and cols */
216         if (issbaij) {
217           owner = row_ownership[index_col];
218           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
219             my_dnz[j] += 1;
220           } else {
221             my_onz[j] += 1;
222           }
223         }
224       }
225       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
226     }
227   }
228   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
229   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
230   if (maxreduce) {
231     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
232     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
233     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
234     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
235   } else {
236     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
237     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
238     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
239     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
240   }
241   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
242 
243   /* Resize preallocation if overestimated */
244   for (i=0;i<lrows;i++) {
245     dnz[i] = PetscMin(dnz[i],lcols);
246     onz[i] = PetscMin(onz[i],cols-lcols);
247   }
248   /* set preallocation */
249   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
250   for (i=0;i<lrows/bs;i++) {
251     dnz[i] = dnz[i*bs]/bs;
252     onz[i] = onz[i*bs]/bs;
253   }
254   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
255   for (i=0;i<lrows/bs;i++) {
256     dnz[i] = dnz[i]-i;
257   }
258   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
259   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
260   if (issbaij) {
261     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
262   }
263   PetscFunctionReturn(0);
264 }
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
268 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
269 {
270   Mat_IS                 *matis = (Mat_IS*)(mat->data);
271   /* info on mat */
272   /* ISLocalToGlobalMapping rmapping,cmapping; */
273   PetscInt               bs,rows,cols;
274   PetscInt               local_rows,local_cols;
275   PetscBool              isdense,issbaij,isseqaij;
276   const PetscInt         *global_indices_rows;
277   PetscMPIInt            nsubdomains;
278   /* values insertion */
279   PetscScalar            *array;
280   /* work */
281   PetscErrorCode         ierr;
282 
283   PetscFunctionBegin;
284   /* MISSING CHECKS
285     - rectangular case not covered (it is not allowed by MATIS)
286   */
287   /* get info from mat */
288   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
289   if (nsubdomains == 1) {
290     if (reuse == MAT_INITIAL_MATRIX) {
291       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
292     } else {
293       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
294     }
295     PetscFunctionReturn(0);
296   }
297   /* ierr = MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping);CHKERRQ(ierr); */
298   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
299   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
300   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
301   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
302   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
303   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
304 
305   /* map indices of local mat rows to global values */
306   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);
307 
308   if (issbaij) {
309     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
310   }
311 
312   if (reuse == MAT_INITIAL_MATRIX) {
313     MatType     new_mat_type;
314     PetscBool   issbaij_red;
315 
316     /* determining new matrix type */
317     ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
318     if (issbaij_red) {
319       new_mat_type = MATSBAIJ;
320     } else {
321       if (bs>1) {
322         new_mat_type = MATBAIJ;
323       } else {
324         new_mat_type = MATAIJ;
325       }
326     }
327 
328     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
329     ierr = MatSetSizes(*M,PETSC_DECIDE,PETSC_DECIDE,rows,cols);CHKERRQ(ierr);
330     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
331     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
332     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
333   } else {
334     PetscInt mbs,mrows,mcols;
335     /* some checks */
336     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
337     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
338     if (mrows != rows) {
339       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
340     }
341     if (mrows != rows) {
342       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
343     }
344     if (mbs != bs) {
345       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
346     }
347     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
348   }
349   /* set local to global mappings */
350   ierr = MatSetLocalToGlobalMapping(*M,matis->mapping,matis->mapping);CHKERRQ(ierr);
351 
352   /* Set values */
353   if (isdense) { /* special case for dense local matrices */
354     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
355     ierr = MatDenseGetArray(matis->A,&array);CHKERRQ(ierr);
356     ierr = MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);CHKERRQ(ierr);
357     ierr = MatDenseRestoreArray(matis->A,&array);CHKERRQ(ierr);
358   } else if (isseqaij) {
359     PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
360     PetscBool done;
361 
362     ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
363     if (!done) {
364       SETERRQ1(PetscObjectComm((PetscObject)matis->A),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
365     }
366     ierr = MatSeqAIJGetArray(matis->A,&array);CHKERRQ(ierr);
367     ierr = PetscMalloc1(xadj[nvtxs],&global_indices_cols);CHKERRQ(ierr);
368     ierr = ISLocalToGlobalMappingApply(matis->mapping,xadj[nvtxs],adjncy,global_indices_cols);CHKERRQ(ierr);
369     for (i=0;i<nvtxs;i++) {
370       PetscInt    row,*cols,ncols;
371       PetscScalar *mat_vals;
372 
373       row = global_indices_rows[i];
374       ncols = xadj[i+1]-xadj[i];
375       cols = global_indices_cols+xadj[i];
376       mat_vals = array+xadj[i];
377       ierr = MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);CHKERRQ(ierr);
378     }
379     ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
380     if (!done) {
381       SETERRQ1(PetscObjectComm((PetscObject)matis->A),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
382     }
383     ierr = MatSeqAIJRestoreArray(matis->A,&array);CHKERRQ(ierr);
384     ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
385   } else { /* very basic values insertion for all other matrix types */
386     PetscInt i;
387     for (i=0;i<local_rows;i++) {
388       PetscInt       j;
389       const PetscInt *local_indices;
390 
391       ierr = MatGetRow(matis->A,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
392       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES);CHKERRQ(ierr);
393       ierr = MatRestoreRow(matis->A,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
394     }
395   }
396   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
397   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
398   if (isdense) {
399     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
400   }
401   if (issbaij) {
402     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
403   }
404   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);
405   PetscFunctionReturn(0);
406 }
407 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Collective on Mat

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: With MAT_REUSE_MATRIX, newmat must be a previously created matrix
         with matching sizes, block size and communicator.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  /* errors (rather than being a no-op) if mat does not provide the method */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
441 
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/*
   MatDuplicate_IS - Duplicates a MATIS matrix: creates a new MATIS with the
   same sizes, block size and local-to-global mapping, and duplicates the
   local matrix (values copied or not according to op).
*/
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
  /* B takes its own reference to localmat, so our reference is dropped below */
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}
464 
#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/*
   MatIsHermitian_IS - Reports the MATIS as Hermitian when every process's
   local matrix is Hermitian (logical AND across the communicator).

   NOTE(review): local Hermitianity is sufficient but not necessary for the
   assembled operator to be Hermitian, so this test can give false negatives.
*/
PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
478 
#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/*
   MatIsSymmetric_IS - Reports the MATIS as symmetric when every process's
   local matrix is symmetric (logical AND across the communicator).

   NOTE(review): local symmetry is sufficient but not necessary for the
   assembled operator to be symmetric, so this test can give false negatives.
*/
PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
492 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/*
   MatDestroy_IS - Destroys all MATIS-owned data: the local matrix, scatter
   context, work vectors, mapping, SF and its work arrays, then clears the
   composed methods installed at creation time.
*/
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  /* sf_rootdata/sf_leafdata were allocated together in MatISComputeSF_Private */
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
516 
#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/*
   MatMult_IS - Computes y = A*x "implicitly": x is scattered to the local
   work vector, multiplied by the local (Neumann) matrix, and the local
   products are scatter-added back into y (summing on interface dofs).
*/
PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is  = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /*  scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "MatMultAdd_IS"
542 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
543 {
544   Vec            temp_vec;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
548   if (v3 != v2) {
549     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
550     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
551   } else {
552     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
553     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
554     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
555     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
556     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
557   }
558   PetscFunctionReturn(0);
559 }
560 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/*
   MatMultTranspose_IS - Computes y = A' * x: x is scattered to the local
   work vector, multiplied by the transpose of the local matrix, and the
   local products are scatter-added back into y.
*/
PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin; /*  y = A' * x */
  /*  scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(y,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
582 
583 #undef __FUNCT__
584 #define __FUNCT__ "MatMultTransposeAdd_IS"
585 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
586 {
587   Vec            temp_vec;
588   PetscErrorCode ierr;
589 
590   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
591   if (v3 != v2) {
592     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
593     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
594   } else {
595     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
596     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
597     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
598     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
599     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
600   }
601   PetscFunctionReturn(0);
602 }
603 
#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/*
   MatView_IS - Views the matrix by viewing each process's local matrix
   through the singleton viewer; ASCII output is synchronized across the
   communicator so the per-process blocks do not interleave.
*/
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
619 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Installs the (identical) row/column
   local-to-global mapping and builds everything derived from it: the local
   matrix A, the local work vectors x/y, and the global-to-local scatter.
   Calling it again with a new mapping destroys and rebuilds these objects.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currenly destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  /* take a reference before destroying: rmapping may be the stored mapping itself */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  /* default local type follows the mapping's block size; user can override via -is_ options */
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
684 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/*
   MatSetValues_IS - Translates global row/column indices to local ones and
   inserts the values into the local matrix.

   NOTE(review): ISG2LMapApply is a file-scope macro defined earlier in this
   file (and #undef'd just below); its handling of global indices that are
   not present in the local mapping should be confirmed against its definition.
   The 2048-entry stack buffers are only bounds-checked in debug builds.
*/
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
702 
703 #undef ISG2LMapSetUp
704 #undef ISG2LMapApply
705 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/*
   MatSetValuesLocal_IS - Local indices coincide with the row/column indices
   of the local matrix, so values are inserted into matis->A directly.
*/
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
717 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/*
   MatSetValuesBlockedLocal_IS - Blocked variant of MatSetValuesLocal_IS:
   local block indices map directly onto the local matrix.
*/
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
729 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/*
   MatZeroRows_IS - Maps the global row indices to local ones (dropping rows
   not present in this process's mapping) and delegates to MatZeroRowsLocal().
   Fixing the right-hand side (x and b both non-NULL) is not supported.
*/
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       n_l = 0, *rows_l = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
    /* IS_GTOLM_DROP: rows owned by other subdomains are simply skipped here */
    ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
  }
  ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
  ierr = PetscFree(rows_l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
748 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   Zeroes rows (given in LOCAL numbering) of the local matrix, placing a
   scaled diagonal value so that the implicitly assembled global matrix
   ends up with exactly `diag` on those rows: each process sharing an
   interface dof contributes diag/multiplicity.
   Fixing x/b is not supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector": scatter ones from every local
       work vector into a global vector (ADD_VALUES accumulates the number
       of subdomains touching each dof), then scatter the counts back so
       is->x[k] holds the multiplicity of local dof k. This is needed for
       MatMult_IS to work properly on interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* no rows to zero on this process: remember the local problem stayed pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows, then reinsert diag scaled by the dof multiplicity so
       the globally summed diagonal equals diag */
    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* NOTE(review): assumes array[rows[i]] (the multiplicity) is nonzero,
         i.e. every local dof appears in the scatter — confirm */
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
792 
793 #undef __FUNCT__
794 #define __FUNCT__ "MatAssemblyBegin_IS"
795 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
796 {
797   Mat_IS         *is = (Mat_IS*)A->data;
798   PetscErrorCode ierr;
799 
800   PetscFunctionBegin;
801   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
802   PetscFunctionReturn(0);
803 }
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "MatAssemblyEnd_IS"
807 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
808 {
809   Mat_IS         *is = (Mat_IS*)A->data;
810   PetscErrorCode ierr;
811 
812   PetscFunctionBegin;
813   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 #undef __FUNCT__
818 #define __FUNCT__ "MatISGetLocalMat_IS"
819 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
820 {
821   Mat_IS *is = (Mat_IS*)mat->data;
822 
823   PetscFunctionBegin;
824   *local = is->A;
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "MatISGetLocalMat"
830 /*@
831     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
832 
833   Input Parameter:
834 .  mat - the matrix
835 
836   Output Parameter:
837 .  local - the local matrix usually MATSEQAIJ
838 
839   Level: advanced
840 
841   Notes:
842     This can be called if you have precomputed the nonzero structure of the
843   matrix and want to provide it to the inner matrix object to improve the performance
844   of the MatSetValues() operation.
845 
846 .seealso: MATIS
847 @*/
848 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
849 {
850   PetscErrorCode ierr;
851 
852   PetscFunctionBegin;
853   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
854   PetscValidPointer(local,2);
855   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
859 #undef __FUNCT__
860 #define __FUNCT__ "MatISSetLocalMat_IS"
861 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
862 {
863   Mat_IS         *is = (Mat_IS*)mat->data;
864   PetscInt       nrows,ncols,orows,ocols;
865   PetscErrorCode ierr;
866 
867   PetscFunctionBegin;
868   if (is->A) {
869     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
870     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
871     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
872   }
873   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
874   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
875   is->A = local;
876   PetscFunctionReturn(0);
877 }
878 
879 #undef __FUNCT__
880 #define __FUNCT__ "MatISSetLocalMat"
881 /*@
882     MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.
883 
884   Input Parameter:
885 .  mat - the matrix
886 .  local - the local matrix usually MATSEQAIJ
887 
888   Output Parameter:
889 
890   Level: advanced
891 
892   Notes:
893     This can be called if you have precomputed the local matrix and
894   want to provide it to the matrix object MATIS.
895 
896 .seealso: MATIS
897 @*/
898 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
899 {
900   PetscErrorCode ierr;
901 
902   PetscFunctionBegin;
903   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
904   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
905   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
906   PetscFunctionReturn(0);
907 }
908 
909 #undef __FUNCT__
910 #define __FUNCT__ "MatZeroEntries_IS"
911 PetscErrorCode MatZeroEntries_IS(Mat A)
912 {
913   Mat_IS         *a = (Mat_IS*)A->data;
914   PetscErrorCode ierr;
915 
916   PetscFunctionBegin;
917   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
918   PetscFunctionReturn(0);
919 }
920 
921 #undef __FUNCT__
922 #define __FUNCT__ "MatScale_IS"
923 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
924 {
925   Mat_IS         *is = (Mat_IS*)A->data;
926   PetscErrorCode ierr;
927 
928   PetscFunctionBegin;
929   ierr = MatScale(is->A,a);CHKERRQ(ierr);
930   PetscFunctionReturn(0);
931 }
932 
933 #undef __FUNCT__
934 #define __FUNCT__ "MatGetDiagonal_IS"
935 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
936 {
937   Mat_IS         *is = (Mat_IS*)A->data;
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   /* get diagonal of the local matrix */
942   ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);
943 
944   /* scatter diagonal back into global vector */
945   ierr = VecSet(v,0);CHKERRQ(ierr);
946   ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
947   ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "MatSetOption_IS"
953 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
954 {
955   Mat_IS         *a = (Mat_IS*)A->data;
956   PetscErrorCode ierr;
957 
958   PetscFunctionBegin;
959   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
960   PetscFunctionReturn(0);
961 }
962 
963 #undef __FUNCT__
964 #define __FUNCT__ "MatCreateIS"
965 /*@
966     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
967        process but not across processes.
968 
969    Input Parameters:
970 +     comm - MPI communicator that will share the matrix
971 .     bs - local and global block size of the matrix
972 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
973 -     map - mapping that defines the global number for each local number
974 
975    Output Parameter:
976 .    A - the resulting matrix
977 
978    Level: advanced
979 
980    Notes: See MATIS for more details
981           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
982           by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
983           plus the ghost points to global indices.
984 
985 .seealso: MATIS, MatSetLocalToGlobalMapping()
986 @*/
987 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
988 {
989   PetscErrorCode ierr;
990 
991   PetscFunctionBegin;
992   ierr = MatCreate(comm,A);CHKERRQ(ierr);
993   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
994   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
995   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
996   ierr = MatSetUp(*A);CHKERRQ(ierr);
997   ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
998   PetscFunctionReturn(0);
999 }
1000 
1001 /*MC
1002    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
1003    This stores the matrices in globally unassembled form. Each processor
1004    assembles only its local Neumann problem and the parallel matrix vector
1005    product is handled "implicitly".
1006 
1007    Operations Provided:
1008 +  MatMult()
1009 .  MatMultAdd()
1010 .  MatMultTranspose()
1011 .  MatMultTransposeAdd()
1012 .  MatZeroEntries()
1013 .  MatSetOption()
1014 .  MatZeroRows()
1015 .  MatZeroRowsLocal()
1016 .  MatSetValues()
1017 .  MatSetValuesLocal()
1018 .  MatScale()
1019 .  MatGetDiagonal()
1020 -  MatSetLocalToGlobalMapping()
1021 
1022    Options Database Keys:
1023 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1024 
   Notes: Options prefix for the inner matrix is given by -is_mat_xxx
1026 
1027           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1028 
1029           You can do matrix preallocation on the local matrix after you obtain it with
1030           MatISGetLocalMat()
1031 
1032   Level: advanced
1033 
1034 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC
1035 
1036 M*/
1037 
1038 #undef __FUNCT__
1039 #define __FUNCT__ "MatCreate_IS"
1040 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1041 {
1042   PetscErrorCode ierr;
1043   Mat_IS         *b;
1044 
1045   PetscFunctionBegin;
1046   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1047   A->data = (void*)b;
1048   ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1049 
1050   A->ops->mult                    = MatMult_IS;
1051   A->ops->multadd                 = MatMultAdd_IS;
1052   A->ops->multtranspose           = MatMultTranspose_IS;
1053   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1054   A->ops->destroy                 = MatDestroy_IS;
1055   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1056   A->ops->setvalues               = MatSetValues_IS;
1057   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1058   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1059   A->ops->zerorows                = MatZeroRows_IS;
1060   A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
1061   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1062   A->ops->assemblyend             = MatAssemblyEnd_IS;
1063   A->ops->view                    = MatView_IS;
1064   A->ops->zeroentries             = MatZeroEntries_IS;
1065   A->ops->scale                   = MatScale_IS;
1066   A->ops->getdiagonal             = MatGetDiagonal_IS;
1067   A->ops->setoption               = MatSetOption_IS;
1068   A->ops->ishermitian             = MatIsHermitian_IS;
1069   A->ops->issymmetric             = MatIsSymmetric_IS;
1070   A->ops->duplicate               = MatDuplicate_IS;
1071 
1072   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1073   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1074 
1075   /* zeros pointer */
1076   b->A           = 0;
1077   b->ctx         = 0;
1078   b->x           = 0;
1079   b->y           = 0;
1080   b->mapping     = 0;
1081   b->sf          = 0;
1082   b->sf_rootdata = 0;
1083   b->sf_leafdata = 0;
1084 
1085   /* special MATIS functions */
1086   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
1087   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
1088   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
1089   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
1090   ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
1091   PetscFunctionReturn(0);
1092 }
1093