xref: /petsc/src/mat/impls/is/matis.c (revision af25d912c737f21fca88746c26cd8b5ca8321f21)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "MatISSetUpSF"
19 /*@
20    MatISSetUpSF - Setup star forest object used by MatIS during MatISSetPreallocation.
21 
22    Collective on MPI_Comm
23 
24    Input Parameters:
25 +  A - the matrix
26 
27    Level: advanced
28 
29    Notes: This function does not need to be called by the user.
30 
31 .keywords: matrix
32 
33 .seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat()
34 @*/
35 PetscErrorCode  MatISSetUpSF(Mat A)
36 {
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
41   PetscValidType(A,1);
42   ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr);
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "MatISSetUpSF_IS"
48 static PetscErrorCode MatISSetUpSF_IS(Mat B)
49 {
50   Mat_IS         *matis = (Mat_IS*)(B->data);
51   const PetscInt *gidxs;
52   PetscErrorCode ierr;
53 
54   PetscFunctionBegin;
55   if (matis->sf) {
56     PetscFunctionReturn(0);
57   }
58   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
59   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
60   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
61   ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
62   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
63   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
64   ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
65   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
66   PetscFunctionReturn(0);
67 }
68 
69 #undef __FUNCT__
70 #define __FUNCT__ "MatISSetPreallocation"
71 /*@
72    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
73 
74    Collective on MPI_Comm
75 
76    Input Parameters:
77 +  B - the matrix
78 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
79            (same value is used for all local rows)
80 .  d_nnz - array containing the number of nonzeros in the various rows of the
81            DIAGONAL portion of the local submatrix (possibly different for each row)
82            or NULL, if d_nz is used to specify the nonzero structure.
83            The size of this array is equal to the number of local rows, i.e 'm'.
84            For matrices that will be factored, you must leave room for (and set)
85            the diagonal entry even if it is zero.
86 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
87            submatrix (same value is used for all local rows).
88 -  o_nnz - array containing the number of nonzeros in the various rows of the
89            OFF-DIAGONAL portion of the local submatrix (possibly different for
90            each row) or NULL, if o_nz is used to specify the nonzero
91            structure. The size of this array is equal to the number
92            of local rows, i.e 'm'.
93 
94    If the *_nnz parameter is given then the *_nz parameter is ignored
95 
96    Level: intermediate
97 
98    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
99           from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
100           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
101 
102 .keywords: matrix
103 
104 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
105 @*/
106 PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
107 {
108   PetscErrorCode ierr;
109 
110   PetscFunctionBegin;
111   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
112   PetscValidType(B,1);
113   ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "MatISSetPreallocation_IS"
/* Preallocate the local matrix of a MATIS from MPIAIJ-style d_nz/d_nnz/o_nz/o_nnz
   arguments. Row counts are summed over the diagonal and off-diagonal parts and
   broadcast through the star forest to all processes sharing each row, so the
   result overestimates the exact local nonzero pattern. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* matis->A is created by MatSetLocalToGlobalMapping_IS; without it there is nothing to preallocate */
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  ierr = MatISSetUpSF(B);CHKERRQ(ierr);
  /* Fill rootdata (one entry per locally owned global row) with the total
     requested nonzeros for that row: diagonal plus off-diagonal part */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* Broadcast the counts from owners to sharers; local size/block-size queries
     are interleaved to overlap communication with (cheap) local work */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* No local row can have more nonzeros than there are local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* Compress leafdata in place to per-block-row block counts for BAIJ;
     NOTE the in-place update is safe because i <= i*bs for i >= 0 */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): subtracting the block-row index presumably discounts the
     lower-triangular blocks not stored by SBAIJ — confirm against SBAIJ docs */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
156 
157 #undef __FUNCT__
158 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
159 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
160 {
161   Mat_IS          *matis = (Mat_IS*)(A->data);
162   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
163   const PetscInt  *global_indices_r,*global_indices_c;
164   PetscInt        i,j,bs,rows,cols;
165   PetscInt        lrows,lcols;
166   PetscInt        local_rows,local_cols;
167   PetscMPIInt     nsubdomains;
168   PetscBool       isdense,issbaij;
169   PetscErrorCode  ierr;
170 
171   PetscFunctionBegin;
172   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
173   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
174   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
175   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
176   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
177   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
178   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
179   if (A->rmap->mapping != A->cmap->mapping) {
180     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
181   } else {
182     global_indices_c = global_indices_r;
183   }
184 
185   if (issbaij) {
186     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
187   }
188   /*
189      An SF reduce is needed to sum up properly on shared rows.
190      Note that generally preallocation is not exact, since it overestimates nonzeros
191   */
192   ierr = MatISSetUpSF(A);CHKERRQ(ierr);
193   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
194   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
195   /* All processes need to compute entire row ownership */
196   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
197   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
198   for (i=0;i<nsubdomains;i++) {
199     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
200       row_ownership[j] = i;
201     }
202   }
203 
204   /*
205      my_dnz and my_onz contains exact contribution to preallocation from each local mat
206      then, they will be summed up properly. This way, preallocation is always sufficient
207   */
208   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
209   /* preallocation as a MATAIJ */
210   if (isdense) { /* special case for dense local matrices */
211     for (i=0;i<local_rows;i++) {
212       PetscInt index_row = global_indices_r[i];
213       for (j=i;j<local_rows;j++) {
214         PetscInt owner = row_ownership[index_row];
215         PetscInt index_col = global_indices_c[j];
216         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
217           my_dnz[i] += 1;
218         } else { /* offdiag block */
219           my_onz[i] += 1;
220         }
221         /* same as before, interchanging rows and cols */
222         if (i != j) {
223           owner = row_ownership[index_col];
224           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
225             my_dnz[j] += 1;
226           } else {
227             my_onz[j] += 1;
228           }
229         }
230       }
231     }
232   } else { /* TODO: this could be optimized using MatGetRowIJ */
233     for (i=0;i<local_rows;i++) {
234       const PetscInt *cols;
235       PetscInt       ncols,index_row = global_indices_r[i];
236       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
237       for (j=0;j<ncols;j++) {
238         PetscInt owner = row_ownership[index_row];
239         PetscInt index_col = global_indices_c[cols[j]];
240         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
241           my_dnz[i] += 1;
242         } else { /* offdiag block */
243           my_onz[i] += 1;
244         }
245         /* same as before, interchanging rows and cols */
246         if (issbaij && index_col != index_row) {
247           owner = row_ownership[index_col];
248           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
249             my_dnz[cols[j]] += 1;
250           } else {
251             my_onz[cols[j]] += 1;
252           }
253         }
254       }
255       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
256     }
257   }
258   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
259   if (global_indices_c != global_indices_r) {
260     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
261   }
262   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
263 
264   /* Reduce my_dnz and my_onz */
265   if (maxreduce) {
266     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
267     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
268     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
269     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
270   } else {
271     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
272     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
273     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
274     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
275   }
276   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
277 
278   /* Resize preallocation if overestimated */
279   for (i=0;i<lrows;i++) {
280     dnz[i] = PetscMin(dnz[i],lcols);
281     onz[i] = PetscMin(onz[i],cols-lcols);
282   }
283   /* set preallocation */
284   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
285   for (i=0;i<lrows/bs;i++) {
286     dnz[i] = dnz[i*bs]/bs;
287     onz[i] = onz[i*bs]/bs;
288   }
289   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
290   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
291   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
292   if (issbaij) {
293     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
294   }
295   PetscFunctionReturn(0);
296 }
297 
298 #undef __FUNCT__
299 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
300 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
301 {
302   Mat_IS         *matis = (Mat_IS*)(mat->data);
303   Mat            local_mat;
304   /* info on mat */
305   PetscInt       bs,rows,cols,lrows,lcols;
306   PetscInt       local_rows,local_cols;
307   PetscBool      isdense,issbaij,isseqaij;
308   PetscMPIInt    nsubdomains;
309   /* values insertion */
310   PetscScalar    *array;
311   /* work */
312   PetscErrorCode ierr;
313 
314   PetscFunctionBegin;
315   /* get info from mat */
316   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
317   if (nsubdomains == 1) {
318     if (reuse == MAT_INITIAL_MATRIX) {
319       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
320     } else {
321       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
322     }
323     PetscFunctionReturn(0);
324   }
325   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
326   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
327   ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
328   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
329   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
330   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
331   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
332 
333   if (reuse == MAT_INITIAL_MATRIX) {
334     MatType     new_mat_type;
335     PetscBool   issbaij_red;
336 
337     /* determining new matrix type */
338     ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
339     if (issbaij_red) {
340       new_mat_type = MATSBAIJ;
341     } else {
342       if (bs>1) {
343         new_mat_type = MATBAIJ;
344       } else {
345         new_mat_type = MATAIJ;
346       }
347     }
348 
349     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
350     ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
351     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
352     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
353     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
354   } else {
355     PetscInt mbs,mrows,mcols,mlrows,mlcols;
356     /* some checks */
357     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
358     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
359     ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
360     if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
361     if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
362     if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
363     if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
364     if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
365     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
366   }
367 
368   if (issbaij) {
369     ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
370   } else {
371     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
372     local_mat = matis->A;
373   }
374 
375   /* Set values */
376   ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
377   if (isdense) { /* special case for dense local matrices */
378     PetscInt i,*dummy_rows,*dummy_cols;
379 
380     if (local_rows != local_cols) {
381       ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
382       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
383       for (i=0;i<local_cols;i++) dummy_cols[i] = i;
384     } else {
385       ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
386       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
387       dummy_cols = dummy_rows;
388     }
389     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
390     ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
391     ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
392     ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
393     if (dummy_rows != dummy_cols) {
394       ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
395     } else {
396       ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
397     }
398   } else if (isseqaij) {
399     PetscInt  i,nvtxs,*xadj,*adjncy;
400     PetscBool done;
401 
402     ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
403     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
404     ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
405     for (i=0;i<nvtxs;i++) {
406       ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
407     }
408     ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
409     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
410     ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
411   } else { /* very basic values insertion for all other matrix types */
412     PetscInt i;
413 
414     for (i=0;i<local_rows;i++) {
415       PetscInt       j;
416       const PetscInt *local_indices_cols;
417 
418       ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
419       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
420       ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
421     }
422   }
423   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
424   ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
425   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
426   if (isdense) {
427     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
428   }
429   PetscFunctionReturn(0);
430 }
431 
432 #undef __FUNCT__
433 #define __FUNCT__ "MatISGetMPIXAIJ"
434 /*@
435     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
436 
437   Input Parameter:
438 .  mat - the matrix (should be of type MATIS)
439 .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
440 
441   Output Parameter:
442 .  newmat - the matrix in AIJ format
443 
444   Level: developer
445 
446   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
447 
448 .seealso: MATIS
449 @*/
450 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
451 {
452   PetscErrorCode ierr;
453 
454   PetscFunctionBegin;
455   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
456   PetscValidLogicalCollectiveEnum(mat,reuse,2);
457   PetscValidPointer(newmat,3);
458   if (reuse != MAT_INITIAL_MATRIX) {
459     PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
460     PetscCheckSameComm(mat,1,*newmat,3);
461     if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
462   }
463   ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "MatDuplicate_IS"
469 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
470 {
471   PetscErrorCode ierr;
472   Mat_IS         *matis = (Mat_IS*)(mat->data);
473   PetscInt       bs,m,n,M,N;
474   Mat            B,localmat;
475 
476   PetscFunctionBegin;
477   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
478   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
479   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
480   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
481   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
482   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
483   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
484   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
485   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
486   *newmat = B;
487   PetscFunctionReturn(0);
488 }
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "MatIsHermitian_IS"
492 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
493 {
494   PetscErrorCode ierr;
495   Mat_IS         *matis = (Mat_IS*)A->data;
496   PetscBool      local_sym;
497 
498   PetscFunctionBegin;
499   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
500   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
501   PetscFunctionReturn(0);
502 }
503 
504 #undef __FUNCT__
505 #define __FUNCT__ "MatIsSymmetric_IS"
506 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
507 {
508   PetscErrorCode ierr;
509   Mat_IS         *matis = (Mat_IS*)A->data;
510   PetscBool      local_sym;
511 
512   PetscFunctionBegin;
513   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
514   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
518 #undef __FUNCT__
519 #define __FUNCT__ "MatDestroy_IS"
520 PetscErrorCode MatDestroy_IS(Mat A)
521 {
522   PetscErrorCode ierr;
523   Mat_IS         *b = (Mat_IS*)A->data;
524 
525   PetscFunctionBegin;
526   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
527   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
528   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
529   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
530   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
531   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
532   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
533   ierr = PetscFree(A->data);CHKERRQ(ierr);
534   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
535   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
536   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
537   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
538   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
539   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr);
540   PetscFunctionReturn(0);
541 }
542 
543 #undef __FUNCT__
544 #define __FUNCT__ "MatMult_IS"
545 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
546 {
547   PetscErrorCode ierr;
548   Mat_IS         *is  = (Mat_IS*)A->data;
549   PetscScalar    zero = 0.0;
550 
551   PetscFunctionBegin;
552   /*  scatter the global vector x into the local work vector */
553   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
554   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
555 
556   /* multiply the local matrix */
557   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
558 
559   /* scatter product back into global memory */
560   ierr = VecSet(y,zero);CHKERRQ(ierr);
561   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
562   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
563   PetscFunctionReturn(0);
564 }
565 
566 #undef __FUNCT__
567 #define __FUNCT__ "MatMultAdd_IS"
568 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
569 {
570   Vec            temp_vec;
571   PetscErrorCode ierr;
572 
573   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
574   if (v3 != v2) {
575     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
576     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
577   } else {
578     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
579     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
580     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
581     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
582     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
583   }
584   PetscFunctionReturn(0);
585 }
586 
587 #undef __FUNCT__
588 #define __FUNCT__ "MatMultTranspose_IS"
589 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
590 {
591   Mat_IS         *is = (Mat_IS*)A->data;
592   PetscErrorCode ierr;
593 
594   PetscFunctionBegin;
595   /*  scatter the global vector x into the local work vector */
596   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
597   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
598 
599   /* multiply the local matrix */
600   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
601 
602   /* scatter product back into global vector */
603   ierr = VecSet(x,0);CHKERRQ(ierr);
604   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
605   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
606   PetscFunctionReturn(0);
607 }
608 
609 #undef __FUNCT__
610 #define __FUNCT__ "MatMultTransposeAdd_IS"
611 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
612 {
613   Vec            temp_vec;
614   PetscErrorCode ierr;
615 
616   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
617   if (v3 != v2) {
618     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
619     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
620   } else {
621     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
622     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
623     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
624     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
625     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
626   }
627   PetscFunctionReturn(0);
628 }
629 
630 #undef __FUNCT__
631 #define __FUNCT__ "MatView_IS"
632 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
633 {
634   Mat_IS         *a = (Mat_IS*)A->data;
635   PetscErrorCode ierr;
636   PetscViewer    sviewer;
637 
638   PetscFunctionBegin;
639   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
640   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
641   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
642   PetscFunctionReturn(0);
643 }
644 
645 #undef __FUNCT__
646 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Install the row/column local-to-global mappings on a MATIS: tears down any
   previous local matrix, work vectors, scatters and SF, then rebuilds the
   sequential local matrix sized by the mappings and the global<->local
   scatter contexts used by the multiplication routines. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data (allows this routine to be called repeatedly) */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential matrix whose dimensions are the
     sizes of the mappings (i.e. the number of local row/column indices) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix is customizable from the options database via the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local index i <-> global index rmapping(i) */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    /* distinct column mapping: build a separate column scatter */
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* share the row scatter (reference counted) */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
710 
711 #undef __FUNCT__
712 #define __FUNCT__ "MatSetValues_IS"
713 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
714 {
715   Mat_IS         *is = (Mat_IS*)mat->data;
716   PetscInt       rows_l[2048],cols_l[2048];
717   PetscErrorCode ierr;
718 
719   PetscFunctionBegin;
720 #if defined(PETSC_USE_DEBUG)
721   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
722 #endif
723   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
724   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
725   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
726   PetscFunctionReturn(0);
727 }
728 
729 #undef __FUNCT__
730 #define __FUNCT__ "MatSetValuesLocal_IS"
731 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
732 {
733   PetscErrorCode ierr;
734   Mat_IS         *is = (Mat_IS*)A->data;
735 
736   PetscFunctionBegin;
737   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
738   PetscFunctionReturn(0);
739 }
740 
741 #undef __FUNCT__
742 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
743 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
744 {
745   PetscErrorCode ierr;
746   Mat_IS         *is = (Mat_IS*)A->data;
747 
748   PetscFunctionBegin;
749   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
750   PetscFunctionReturn(0);
751 }
752 
753 #undef __FUNCT__
754 #define __FUNCT__ "MatZeroRows_IS"
755 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
756 {
757   PetscInt       n_l = 0, *rows_l = NULL;
758   PetscErrorCode ierr;
759 
760   PetscFunctionBegin;
761   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
762   if (n) {
763     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
764     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
765   }
766   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
767   ierr = PetscFree(rows_l);CHKERRQ(ierr);
768   PetscFunctionReturn(0);
769 }
770 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeroes the given rows (local numbering) of the inner
   matrix and places diag on their diagonal.

   Since interface dofs are shared among subdomains, the diagonal value set
   locally is divided by the dof multiplicity so that the implicitly
   assembled global diagonal entry equals diag. The x/b right-hand-side
   fixup is not supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    /* scatter local ones and add them in the global vector: each global
       entry ends up holding the number of subdomains sharing that dof */
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* bring the multiplicities back to the local work vector is->y */
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* no rows zeroed on this process: local problem stays pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* divide by the multiplicity so the assembled diagonal equals diag */
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "MatAssemblyBegin_IS"
816 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
817 {
818   Mat_IS         *is = (Mat_IS*)A->data;
819   PetscErrorCode ierr;
820 
821   PetscFunctionBegin;
822   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
823   PetscFunctionReturn(0);
824 }
825 
826 #undef __FUNCT__
827 #define __FUNCT__ "MatAssemblyEnd_IS"
828 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
829 {
830   Mat_IS         *is = (Mat_IS*)A->data;
831   PetscErrorCode ierr;
832 
833   PetscFunctionBegin;
834   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "MatISGetLocalMat_IS"
840 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
841 {
842   Mat_IS *is = (Mat_IS*)mat->data;
843 
844   PetscFunctionBegin;
845   *local = is->A;
846   PetscFunctionReturn(0);
847 }
848 
849 #undef __FUNCT__
850 #define __FUNCT__ "MatISGetLocalMat"
851 /*@
852     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
853 
854   Input Parameter:
855 .  mat - the matrix
856 
857   Output Parameter:
858 .  local - the local matrix
859 
860   Level: advanced
861 
862   Notes:
863     This can be called if you have precomputed the nonzero structure of the
864   matrix and want to provide it to the inner matrix object to improve the performance
865   of the MatSetValues() operation.
866 
867 .seealso: MATIS
868 @*/
869 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
870 {
871   PetscErrorCode ierr;
872 
873   PetscFunctionBegin;
874   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
875   PetscValidPointer(local,2);
876   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
877   PetscFunctionReturn(0);
878 }
879 
880 #undef __FUNCT__
881 #define __FUNCT__ "MatISSetLocalMat_IS"
882 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
883 {
884   Mat_IS         *is = (Mat_IS*)mat->data;
885   PetscInt       nrows,ncols,orows,ocols;
886   PetscErrorCode ierr;
887 
888   PetscFunctionBegin;
889   if (is->A) {
890     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
891     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
892     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
893   }
894   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
895   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
896   is->A = local;
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "MatISSetLocalMat"
902 /*@
903     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
904 
905   Input Parameter:
906 .  mat - the matrix
907 .  local - the local matrix
908 
909   Output Parameter:
910 
911   Level: advanced
912 
913   Notes:
914     This can be called if you have precomputed the local matrix and
915   want to provide it to the matrix object MATIS.
916 
917 .seealso: MATIS
918 @*/
919 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
920 {
921   PetscErrorCode ierr;
922 
923   PetscFunctionBegin;
924   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
925   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
926   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "MatZeroEntries_IS"
932 PetscErrorCode MatZeroEntries_IS(Mat A)
933 {
934   Mat_IS         *a = (Mat_IS*)A->data;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "MatScale_IS"
944 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
945 {
946   Mat_IS         *is = (Mat_IS*)A->data;
947   PetscErrorCode ierr;
948 
949   PetscFunctionBegin;
950   ierr = MatScale(is->A,a);CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "MatGetDiagonal_IS"
956 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
957 {
958   Mat_IS         *is = (Mat_IS*)A->data;
959   PetscErrorCode ierr;
960 
961   PetscFunctionBegin;
962   /* get diagonal of the local matrix */
963   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
964 
965   /* scatter diagonal back into global vector */
966   ierr = VecSet(v,0);CHKERRQ(ierr);
967   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
968   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "MatSetOption_IS"
974 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
975 {
976   Mat_IS         *a = (Mat_IS*)A->data;
977   PetscErrorCode ierr;
978 
979   PetscFunctionBegin;
980   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
981   PetscFunctionReturn(0);
982 }
983 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* a missing row (resp. column) map means the matrix is square: reuse the other map */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1029 
1030 /*MC
1031    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1032    This stores the matrices in globally unassembled form. Each processor
1033    assembles only its local Neumann problem and the parallel matrix vector
1034    product is handled "implicitly".
1035 
1036    Operations Provided:
1037 +  MatMult()
1038 .  MatMultAdd()
1039 .  MatMultTranspose()
1040 .  MatMultTransposeAdd()
1041 .  MatZeroEntries()
1042 .  MatSetOption()
1043 .  MatZeroRows()
1044 .  MatZeroRowsLocal()
1045 .  MatSetValues()
1046 .  MatSetValuesLocal()
1047 .  MatScale()
1048 .  MatGetDiagonal()
1049 -  MatSetLocalToGlobalMapping()
1050 
1051    Options Database Keys:
1052 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1053 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1055 
1056           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1057 
1058           You can do matrix preallocation on the local matrix after you obtain it with
1059           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1060 
1061   Level: advanced
1062 
1063 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1064 
1065 M*/
1066 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor for the MATIS matrix type: allocates the
   private data structure, installs the type-specific operations table,
   and composes the MATIS-specific query/set functions on the object.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  /* private data: zeroed by PetscNewLog, so all pointers start NULL */
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1111