xref: /petsc/src/mat/impls/is/matis.c (revision 3bbff08aae55fef3c6bc0ec24cdbd560153bb73a)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/* Builds the star forest (SF) relating the global (assembled) row space of B
   (roots: locally owned rows of B) to the row space of the local unassembled
   matrix matis->A (leaves: all rows of the local matrix), and allocates the
   matching root/leaf work arrays used by the preallocation routines.
   The SF, sf_rootdata and sf_leafdata are freed in MatDestroy_IS (and in
   MatSetLocalToGlobalMapping_IS when the mapping is reset). */
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* number of leaves: rows of the local matrix; number of roots: owned rows of B */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the graph connects each local row to its global index via the row mapping */
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation if one is composed (no-op otherwise) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatISSetPreallocation_IS"
89 PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
90 {
91   Mat_IS         *matis = (Mat_IS*)(B->data);
92   PetscInt       bs,i,nlocalcols;
93   PetscErrorCode ierr;
94 
95   PetscFunctionBegin;
96   if (!matis->A) {
97     SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
98   }
99   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
100     ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
101   }
102   if (!d_nnz) {
103     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
104   } else {
105     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
106   }
107   if (!o_nnz) {
108     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
109   } else {
110     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
111   }
112   ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
113   ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
114   ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
115   ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
116   for (i=0;i<matis->sf_nleaves;i++) {
117     matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
118   }
119   ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
120   for (i=0;i<matis->sf_nleaves/bs;i++) {
121     matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
122   }
123   ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
124   for (i=0;i<matis->sf_nleaves/bs;i++) {
125     matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
126   }
127   ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
128   PetscFunctionReturn(0);
129 }
130 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Computes and sets the preallocation of the assembled matrix B from the
   unassembled MATIS matrix A.

   For every row of every local matrix, exact diagonal/off-diagonal nonzero
   counts (with respect to the row ownership ranges of A) are accumulated in
   my_dnz/my_onz, then reduced over the SF onto the owned rows: with
   maxreduce == PETSC_TRUE contributions from different subdomains are
   combined with MPI_MAX, otherwise they are summed (MPI_SUM), which can only
   overestimate on shared interface dofs. The resulting dnz/onz arrays are
   used to preallocate B in AIJ, BAIJ and SBAIJ formats (the calls for the
   formats not matching the type of B are no-ops). */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt* global_indices;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices);CHKERRQ(ierr);
  /* SBAIJ only stores the upper triangle; MatGetRow below needs access to it */
  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared interface dofs.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  /* row_ownership[g] = rank owning global row g */
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j]=i;
    }
  }

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* a dense local matrix contributes a full (symmetric) nonzero pattern:
       only the upper triangle j >= i is traversed, with the (j,i) entry
       accounted for explicitly */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else {
    /* general sparse case: walk the stored pattern row by row */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ stores only the upper triangle: also count the transposed entry */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);
  /* reduce the local counts onto the owned rows (see header comment) */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress scalar counts into block-row counts for BAIJ/SBAIJ */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
262 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Implementation of MatISGetMPIXAIJ(): assembles the unassembled MATIS matrix
   mat into the parallel matrix *M (AIJ, BAIJ or SBAIJ, chosen from the local
   matrix type and block size when reuse == MAT_INITIAL_MATRIX).

   Values are accumulated with ADD_VALUES, so contributions on shared
   interface dofs are summed across subdomains. With MAT_REUSE_MATRIX the
   existing *M is checked for compatible sizes/block size and zeroed first. */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  const PetscInt *global_indices_rows;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
    - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* uniprocessor case: the local matrix already is the assembled matrix */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  /* map indices of local mat rows to global values */
  ierr = ISLocalToGlobalMappingGetIndices(mat->rmap->mapping,&global_indices_rows);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every subdomain matrix is SBAIJ */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    /* PETSC_FALSE: sum (not max) contributions on shared dofs during preallocation */
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    /* values are accumulated with ADD_VALUES below, so start from zero */
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ stores the upper triangle only: convert to BAIJ to get full rows */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  if (isdense) { /* special case for dense local matrices */
    /* the dense array is column oriented; tell *M so before inserting */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else if (isseqaij) {
    /* fast path: insert straight from the CSR arrays of the local matrix */
    PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    /* translate all column indices to global numbering in one shot */
    ierr = PetscMalloc1(xadj[nvtxs],&global_indices_cols);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,xadj[nvtxs],adjncy,global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      PetscInt    row,*cols,ncols;
      PetscScalar *mat_vals;

      row = global_indices_rows[i];
      ncols = xadj[i+1]-xadj[i];
      cols = global_indices_cols+xadj[i];
      mat_vals = array+xadj[i];
      ierr = MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i,*global_indices_cols;

    ierr = PetscMalloc1(local_cols,&global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices;

      ierr = MatGetRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,j,local_indices,global_indices_cols);CHKERRQ(ierr);
      ierr = MatSetValues(*M,1,&global_indices_rows[i],j,global_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  }
  /* cleanup overlapped with the assembly communication */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(mat->rmap->mapping,&global_indices_rows);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* restore the default row orientation changed in the dense branch */
  if (isdense) {
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
414 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* with MAT_REUSE_MATRIX, *newmat must be a valid, distinct matrix on the same communicator */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) {
      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
    }
  }
  /* dispatch to the MATIS implementation; errors if the method is not composed */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
451 
452 #undef __FUNCT__
453 #define __FUNCT__ "MatDuplicate_IS"
454 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
455 {
456   PetscErrorCode ierr;
457   Mat_IS         *matis = (Mat_IS*)(mat->data);
458   PetscInt       bs,m,n,M,N;
459   Mat            B,localmat;
460 
461   PetscFunctionBegin;
462   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
463   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
464   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
465   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,&B);CHKERRQ(ierr);
466   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
467   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
468   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
469   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
470   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
471   *newmat = B;
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "MatIsHermitian_IS"
477 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
478 {
479   PetscErrorCode ierr;
480   Mat_IS         *matis = (Mat_IS*)A->data;
481   PetscBool      local_sym;
482 
483   PetscFunctionBegin;
484   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
485   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
486   PetscFunctionReturn(0);
487 }
488 
489 #undef __FUNCT__
490 #define __FUNCT__ "MatIsSymmetric_IS"
491 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
492 {
493   PetscErrorCode ierr;
494   Mat_IS         *matis = (Mat_IS*)A->data;
495   PetscBool      local_sym;
496 
497   PetscFunctionBegin;
498   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
499   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
500   PetscFunctionReturn(0);
501 }
502 
503 #undef __FUNCT__
504 #define __FUNCT__ "MatDestroy_IS"
505 PetscErrorCode MatDestroy_IS(Mat A)
506 {
507   PetscErrorCode ierr;
508   Mat_IS         *b = (Mat_IS*)A->data;
509 
510   PetscFunctionBegin;
511   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
512   ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
513   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
514   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
515   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
516   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
517   ierr = PetscFree(A->data);CHKERRQ(ierr);
518   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
519   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
520   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
521   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
522   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
523   PetscFunctionReturn(0);
524 }
525 
526 #undef __FUNCT__
527 #define __FUNCT__ "MatMult_IS"
528 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
529 {
530   PetscErrorCode ierr;
531   Mat_IS         *is  = (Mat_IS*)A->data;
532   PetscScalar    zero = 0.0;
533 
534   PetscFunctionBegin;
535   /*  scatter the global vector x into the local work vector */
536   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
537   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
538 
539   /* multiply the local matrix */
540   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
541 
542   /* scatter product back into global memory */
543   ierr = VecSet(y,zero);CHKERRQ(ierr);
544   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
545   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
546   PetscFunctionReturn(0);
547 }
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "MatMultAdd_IS"
551 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
552 {
553   Vec            temp_vec;
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
557   if (v3 != v2) {
558     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
559     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
560   } else {
561     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
562     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
563     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
564     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
565     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
566   }
567   PetscFunctionReturn(0);
568 }
569 
570 #undef __FUNCT__
571 #define __FUNCT__ "MatMultTranspose_IS"
572 PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
573 {
574   Mat_IS         *is = (Mat_IS*)A->data;
575   PetscErrorCode ierr;
576 
577   PetscFunctionBegin; /*  y = A' * x */
578   /*  scatter the global vector x into the local work vector */
579   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
580   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
581 
582   /* multiply the local matrix */
583   ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);
584 
585   /* scatter product back into global vector */
586   ierr = VecSet(y,0);CHKERRQ(ierr);
587   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
588   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
589   PetscFunctionReturn(0);
590 }
591 
592 #undef __FUNCT__
593 #define __FUNCT__ "MatMultTransposeAdd_IS"
594 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
595 {
596   Vec            temp_vec;
597   PetscErrorCode ierr;
598 
599   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
600   if (v3 != v2) {
601     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
602     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
603   } else {
604     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
605     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
606     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
607     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
608     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
609   }
610   PetscFunctionReturn(0);
611 }
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "MatView_IS"
615 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
616 {
617   Mat_IS         *a = (Mat_IS*)A->data;
618   PetscErrorCode ierr;
619   PetscViewer    sviewer;
620 
621   PetscFunctionBegin;
622   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
623   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
624   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
625   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
626   PetscFunctionReturn(0);
627 }
628 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the local-to-global mapping on a MATIS matrix and (re)builds all
   derived data: the sequential local matrix is->A (BAIJ if the mapping block
   size is > 1, AIJ otherwise), the local work vectors is->x/is->y, and the
   global-to-local scatter is->ctx. MATIS requires rmapping == cmapping. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A (square, of size = number of indices in the mapping) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  /* default local type follows the block size; overridable via -is_ options below */
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatSetUp(is->A);CHKERRQ(ierr);
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter: local position i <- global position mapping(i) */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
685 
686 #undef __FUNCT__
687 #define __FUNCT__ "MatSetValues_IS"
688 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
689 {
690   Mat_IS         *is = (Mat_IS*)mat->data;
691   PetscInt       rows_l[2048],cols_l[2048];
692   PetscErrorCode ierr;
693 
694   PetscFunctionBegin;
695 #if defined(PETSC_USE_DEBUG)
696   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
697 #endif
698   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
699   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
700   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
701   PetscFunctionReturn(0);
702 }
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "MatSetValuesLocal_IS"
706 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
707 {
708   PetscErrorCode ierr;
709   Mat_IS         *is = (Mat_IS*)A->data;
710 
711   PetscFunctionBegin;
712   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
713   PetscFunctionReturn(0);
714 }
715 
716 #undef __FUNCT__
717 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
718 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
719 {
720   PetscErrorCode ierr;
721   Mat_IS         *is = (Mat_IS*)A->data;
722 
723   PetscFunctionBegin;
724   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
725   PetscFunctionReturn(0);
726 }
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "MatZeroRows_IS"
730 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
731 {
732   PetscInt       n_l = 0, *rows_l = NULL;
733   PetscErrorCode ierr;
734 
735   PetscFunctionBegin;
736   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
737   if (n) {
738     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
739     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
740   }
741   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
742   ierr = PetscFree(rows_l);CHKERRQ(ierr);
743   PetscFunctionReturn(0);
744 }
745 
746 #undef __FUNCT__
747 #define __FUNCT__ "MatZeroRowsLocal_IS"
/* Zero rows given by LOCAL indices, setting diag on the zeroed diagonal.
   The diagonal value placed in the LOCAL matrix is diag divided by the
   multiplicity of the dof, so the implicitly assembled operator ends up with
   exactly diag there. */
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* fixing the solution and rhs at the zeroed rows is not implemented */
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector": after the two scatters each local
       entry holds the number of subdomains sharing that dof. This is needed
       for MatMult_IS to work properly on the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    /* sum the per-subdomain ones into the global vector ... */
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* ... and bring the multiplicities back to the local numbering */
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    /* divide by the dof multiplicity so the assembled diagonal equals diag */
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "MatAssemblyBegin_IS"
792 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
793 {
794   Mat_IS         *is = (Mat_IS*)A->data;
795   PetscErrorCode ierr;
796 
797   PetscFunctionBegin;
798   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "MatAssemblyEnd_IS"
804 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
805 {
806   Mat_IS         *is = (Mat_IS*)A->data;
807   PetscErrorCode ierr;
808 
809   PetscFunctionBegin;
810   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
811   PetscFunctionReturn(0);
812 }
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "MatISGetLocalMat_IS"
816 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
817 {
818   Mat_IS *is = (Mat_IS*)mat->data;
819 
820   PetscFunctionBegin;
821   *local = is->A;
822   PetscFunctionReturn(0);
823 }
824 
825 #undef __FUNCT__
826 #define __FUNCT__ "MatISGetLocalMat"
827 /*@
828     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
829 
830   Input Parameter:
831 .  mat - the matrix
832 
833   Output Parameter:
834 .  local - the local matrix
835 
836   Level: advanced
837 
838   Notes:
839     This can be called if you have precomputed the nonzero structure of the
840   matrix and want to provide it to the inner matrix object to improve the performance
841   of the MatSetValues() operation.
842 
843 .seealso: MATIS
844 @*/
845 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
846 {
847   PetscErrorCode ierr;
848 
849   PetscFunctionBegin;
850   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
851   PetscValidPointer(local,2);
852   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "MatISSetLocalMat_IS"
858 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
859 {
860   Mat_IS         *is = (Mat_IS*)mat->data;
861   PetscInt       nrows,ncols,orows,ocols;
862   PetscErrorCode ierr;
863 
864   PetscFunctionBegin;
865   if (is->A) {
866     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
867     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
868     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
869   }
870   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
871   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
872   is->A = local;
873   PetscFunctionReturn(0);
874 }
875 
876 #undef __FUNCT__
877 #define __FUNCT__ "MatISSetLocalMat"
878 /*@
879     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
880 
  Input Parameters:
882 .  mat - the matrix
883 .  local - the local matrix
884 
885   Output Parameter:
886 
887   Level: advanced
888 
889   Notes:
890     This can be called if you have precomputed the local matrix and
891   want to provide it to the matrix object MATIS.
892 
893 .seealso: MATIS
894 @*/
895 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
896 {
897   PetscErrorCode ierr;
898 
899   PetscFunctionBegin;
900   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
901   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
902   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
903   PetscFunctionReturn(0);
904 }
905 
906 #undef __FUNCT__
907 #define __FUNCT__ "MatZeroEntries_IS"
908 PetscErrorCode MatZeroEntries_IS(Mat A)
909 {
910   Mat_IS         *a = (Mat_IS*)A->data;
911   PetscErrorCode ierr;
912 
913   PetscFunctionBegin;
914   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
915   PetscFunctionReturn(0);
916 }
917 
918 #undef __FUNCT__
919 #define __FUNCT__ "MatScale_IS"
920 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
921 {
922   Mat_IS         *is = (Mat_IS*)A->data;
923   PetscErrorCode ierr;
924 
925   PetscFunctionBegin;
926   ierr = MatScale(is->A,a);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "MatGetDiagonal_IS"
932 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
933 {
934   Mat_IS         *is = (Mat_IS*)A->data;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   /* get diagonal of the local matrix */
939   ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);
940 
941   /* scatter diagonal back into global vector */
942   ierr = VecSet(v,0);CHKERRQ(ierr);
943   ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
944   ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
945   PetscFunctionReturn(0);
946 }
947 
948 #undef __FUNCT__
949 #define __FUNCT__ "MatSetOption_IS"
950 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
951 {
952   Mat_IS         *a = (Mat_IS*)A->data;
953   PetscErrorCode ierr;
954 
955   PetscFunctionBegin;
956   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
960 #undef __FUNCT__
961 #define __FUNCT__ "MatCreateIS"
962 /*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
964        process but not across processes.
965 
966    Input Parameters:
967 +     comm - MPI communicator that will share the matrix
968 .     bs - local and global block size of the matrix
969 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
970 -     map - mapping that defines the global number for each local number
971 
972    Output Parameter:
973 .    A - the resulting matrix
974 
975    Level: advanced
976 
977    Notes: See MATIS for more details
978           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
979           by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
980           plus the ghost points to global indices.
981 
982 .seealso: MATIS, MatSetLocalToGlobalMapping()
983 @*/
984 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
985 {
986   PetscErrorCode ierr;
987 
988   PetscFunctionBegin;
989   ierr = MatCreate(comm,A);CHKERRQ(ierr);
990   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
991   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
992   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
993   ierr = MatSetUp(*A);CHKERRQ(ierr);
994   ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
995   PetscFunctionReturn(0);
996 }
997 
998 /*MC
999    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1000    This stores the matrices in globally unassembled form. Each processor
1001    assembles only its local Neumann problem and the parallel matrix vector
1002    product is handled "implicitly".
1003 
1004    Operations Provided:
1005 +  MatMult()
1006 .  MatMultAdd()
1007 .  MatMultTranspose()
1008 .  MatMultTransposeAdd()
1009 .  MatZeroEntries()
1010 .  MatSetOption()
1011 .  MatZeroRows()
1012 .  MatZeroRowsLocal()
1013 .  MatSetValues()
1014 .  MatSetValuesLocal()
1015 .  MatScale()
1016 .  MatGetDiagonal()
1017 -  MatSetLocalToGlobalMapping()
1018 
1019    Options Database Keys:
1020 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1021 
1022    Notes: Options prefix for the inner matrix are given by -is_mat_xxx
1023 
1024           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1025 
1026           You can do matrix preallocation on the local matrix after you obtain it with
1027           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1028 
1029   Level: advanced
1030 
1031 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1032 
1033 M*/
1034 
1035 #undef __FUNCT__
1036 #define __FUNCT__ "MatCreate_IS"
/* Constructor for MATIS: allocates the private data, installs the supported
   operations, and composes the type-specific query/set functions. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  /* allocate and attach the MATIS private data structure */
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  /* start from an empty operation table; only the ops below are supported */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);

  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* null out the private fields; they are created later (presumably when the
     local-to-global mapping is set — see MatSetLocalToGlobalMapping_IS) */
  b->A           = 0;
  b->ctx         = 0;
  b->x           = 0;
  b->y           = 0;
  b->sf          = 0;
  b->sf_rootdata = 0;
  b->sf_leafdata = 0;

  /* special MATIS functions, reachable via PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1086