xref: /petsc/src/mat/impls/is/matis.c (revision 527b2640c901bc3d3edc3ecfd58a621bdc06cd0c)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4     This stores the matrices in globally unassembled form. Each processor
5     assembles only its local Neumann problem and the parallel matrix vector
6     product is handled "implicitly".
7 
8     Currently this allows for only one subdomain per processor.
9 */
10 
11 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
12 
13 static PetscErrorCode MatISComputeSF_Private(Mat);
14 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);
15 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/*
   MatGetLocalSubMatrix_IS - Extracts a submatrix of a MATIS matrix as a new MATIS matrix.

   Input Parameters:
+  A   - the MATIS matrix
.  row - rows to extract, expressed in the local numbering of A's row mapping
-  col - columns to extract, expressed in the local numbering of A's column mapping

   Output Parameter:
.  submat - the extracted submatrix, of type MATIS

   The local submatrix is obtained via MatGetSubMatrix on the local matrix; the
   new row/column local-to-global mappings are built by mapping the selected
   local indices through A's mappings and renumbering them contiguously with
   ISRenumber (which also returns the new global size).
*/
PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA,lsubmat;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is,isn;
  const PetscInt         *rg,*rl;
#if defined(PETSC_USE_DEBUG)
  PetscInt               nrg;
#endif
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* extract local submatrix (it will complain if row and col exceed the local sizes) */
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(lA,row,col,MAT_INITIAL_MATRIX,&lsubmat);CHKERRQ(ierr);
  /* compute new l2g map for rows */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* in debug mode, validate that the requested local rows fit in the mapping */
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %d -> %d greater than maximum possible %d\n",i,rl[i],nrg);
#endif
  /* map the selected local rows to global indices of A */
  ierr = PetscMalloc1(nrl,&idxs);CHKERRQ(ierr);
  for (i=0;i<nrl;i++) idxs[i] = rg[rl[i]];
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrl,idxs,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = PetscFree(idxs);CHKERRQ(ierr);
  /* renumber to a contiguous global numbering; M is the new global row count */
  ierr = ISRenumber(is,NULL,&M,&isn);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(isn,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&isn);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
#if defined(PETSC_USE_DEBUG)
    PetscInt       ncg;
#endif
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %d -> %d greater than maximum possible %d\n",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncl,&idxs);CHKERRQ(ierr);
    for (i=0;i<ncl;i++) idxs[i] = cg[cl[i]];
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncl,idxs,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
    ierr = PetscFree(idxs);CHKERRQ(ierr);
    ierr = ISRenumber(is,NULL,&N,&isn);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(isn,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&isn);CHKERRQ(ierr);
  } else {
    /* rows and columns use the same selection and mapping: reuse the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
    N = M;
  }
  /* create the MATIS submatrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lsubmat);CHKERRQ(ierr);
  ierr = MatDestroy(&lsubmat);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "MatCopy_IS"
99 PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
100 {
101   Mat_IS         *a = (Mat_IS*)A->data,*b;
102   PetscBool      ismatis;
103   PetscErrorCode ierr;
104 
105   PetscFunctionBegin;
106   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
107   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
108   b = (Mat_IS*)B->data;
109   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
110   PetscFunctionReturn(0);
111 }
112 
113 #undef __FUNCT__
114 #define __FUNCT__ "MatMissingDiagonal_IS"
115 PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
116 {
117   Vec               v;
118   const PetscScalar *array;
119   PetscInt          i,n;
120   PetscErrorCode    ierr;
121 
122   PetscFunctionBegin;
123   *missing = PETSC_FALSE;
124   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
125   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
126   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
127   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
128   for (i=0;i<n;i++) if (array[i] == 0.) break;
129   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
130   ierr = VecDestroy(&v);CHKERRQ(ierr);
131   if (i != n) *missing = PETSC_TRUE;
132   if (d) {
133     *d = -1;
134     if (*missing) {
135       PetscInt rstart;
136       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
137       *d = i+rstart;
138     }
139   }
140   PetscFunctionReturn(0);
141 }
142 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the PetscSF connecting the assembled layout of B (roots: locally
   owned rows) with the unassembled layout (leaves: rows of the local matrix,
   addressed through B's row local-to-global mapping), and allocates the
   sf_rootdata/sf_leafdata work arrays used by the preallocation routines.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: rows of the local matrix; roots: locally owned rows of B */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
162 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op if not composed) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
210 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   Type-specific preallocation for MATIS: sums the requested diagonal and
   off-diagonal counts on the owned rows, broadcasts them through the SF to
   the rows of the local (unassembled) matrix, and uses the result to
   preallocate the local matrix for the AIJ, BAIJ and SBAIJ cases (only the
   call matching the actual local matrix type takes effect; the others are
   ignored by PETSc's type-dispatch).
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = requested total (diag + offdiag) nonzeros for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with the local size/blocksize queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the per-row counts into per-block-row counts (overwrites leafdata in place) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract the block-row index */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
252 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   Computes and sets the preallocation of the assembled parallel matrix B from
   the unassembled MATIS matrix A. Each process counts, for every row of its
   local matrix, how many nonzeros land in the diagonal and off-diagonal blocks
   of the owning process; the per-row counts are then reduced across processes
   through A's SF (MPI_MAX if maxreduce, MPI_SUM otherwise) and used to
   preallocate B as MPIAIJ, MPIBAIJ and MPISBAIJ (only the call matching B's
   actual type takes effect). Preallocation is generally an overestimate.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  /* SBAIJ stores the upper triangle only: MatGetRow needs this call first */
  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges (for the diag-block test) */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* count only the upper triangle (j>=i) and mirror the contribution,
       since a dense local matrix has a structurally symmetric pattern */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ rows only carry the upper triangle: count the mirrored entry too */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress per-row counts into per-block-row counts for (S)BAIJ (in place) */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
396 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   Assembles the MATIS matrix 'mat' into a parallel (MPI)AIJ/BAIJ/SBAIJ matrix M.
   On a single process the local matrix is simply duplicated/copied. Otherwise
   a new parallel matrix is created (MAT_INITIAL_MATRIX) or a compatible
   existing one is zeroed and reused (MAT_REUSE_MATRIX), and the local matrix
   entries are added into it through the local-to-global mappings.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* sequential case: the local matrix already is the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type */
    /* SBAIJ only if every process has an SBAIJ local matrix */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* convert to BAIJ so rows carry the full (not just upper-triangular) pattern */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays 0..local_rows-1 / 0..local_cols-1 for a single
       MatSetValuesLocal call covering the whole dense block */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense storage is column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: access the CSR arrays of the local matrix directly */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
530 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* reusing requires a valid target on the same communicator, distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
565 
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/*
   Duplicates a MATIS matrix: creates a new MATIS with the same layout and
   local-to-global mappings, duplicates the local matrix according to op,
   and assembles the result into *newmat.
*/
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  /* new MATIS with same sizes and mappings */
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  /* op controls whether values are copied or only the pattern */
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}
588 
589 #undef __FUNCT__
590 #define __FUNCT__ "MatIsHermitian_IS"
591 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
592 {
593   PetscErrorCode ierr;
594   Mat_IS         *matis = (Mat_IS*)A->data;
595   PetscBool      local_sym;
596 
597   PetscFunctionBegin;
598   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
599   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "MatIsSymmetric_IS"
605 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
606 {
607   PetscErrorCode ierr;
608   Mat_IS         *matis = (Mat_IS*)A->data;
609   PetscBool      local_sym;
610 
611   PetscFunctionBegin;
612   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
613   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
614   PetscFunctionReturn(0);
615 }
616 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/*
   Destroys a MATIS matrix: frees the local matrix, scatters, work vectors,
   SF and its work arrays, the type data, and unregisters the composed
   type-specific methods.
*/
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* remove composed methods so a later type change starts clean */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
640 
641 #undef __FUNCT__
642 #define __FUNCT__ "MatMult_IS"
643 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
644 {
645   PetscErrorCode ierr;
646   Mat_IS         *is  = (Mat_IS*)A->data;
647   PetscScalar    zero = 0.0;
648 
649   PetscFunctionBegin;
650   /*  scatter the global vector x into the local work vector */
651   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
652   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
653 
654   /* multiply the local matrix */
655   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
656 
657   /* scatter product back into global memory */
658   ierr = VecSet(y,zero);CHKERRQ(ierr);
659   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
660   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
661   PetscFunctionReturn(0);
662 }
663 
664 #undef __FUNCT__
665 #define __FUNCT__ "MatMultAdd_IS"
666 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
667 {
668   Vec            temp_vec;
669   PetscErrorCode ierr;
670 
671   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
672   if (v3 != v2) {
673     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
674     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
675   } else {
676     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
677     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
678     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
679     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
680     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
681   }
682   PetscFunctionReturn(0);
683 }
684 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y for a MATIS matrix: gather y into the local row-space work
   vector, apply the transpose of the local matrix, then sum the local
   results back into the global vector x through the column scatter. */
PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector y into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
706 
707 #undef __FUNCT__
708 #define __FUNCT__ "MatMultTransposeAdd_IS"
709 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
710 {
711   Vec            temp_vec;
712   PetscErrorCode ierr;
713 
714   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
715   if (v3 != v2) {
716     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
717     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
718   } else {
719     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
720     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
721     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
722     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
723     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
724   }
725   PetscFunctionReturn(0);
726 }
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "MatView_IS"
730 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
731 {
732   Mat_IS         *a = (Mat_IS*)A->data;
733   PetscErrorCode ierr;
734   PetscViewer    sviewer;
735 
736   PetscFunctionBegin;
737   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
738   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
739   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
740   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
741   PetscFunctionReturn(0);
742 }
743 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   Installs the row/column local-to-global maps into a MATIS matrix:
   destroys any previous local data, creates the sequential local matrix,
   the local work vectors and the global<->local scatters used by the
   MatMult/MatMultTranspose implementations.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data: the maps may be reset after the matrix was already set up */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the sequential local matrix A; its sizes come from the l2g map sizes */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the inner matrix is customizable with the -is_ options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local entry i corresponds to
     global entry map(i), so scatter "from" the mapped indices "to" a stride IS */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same map for rows and columns: share the row scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
810 
811 #undef __FUNCT__
812 #define __FUNCT__ "MatSetValues_IS"
813 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
814 {
815   Mat_IS         *is = (Mat_IS*)mat->data;
816   PetscErrorCode ierr;
817 #if defined(PETSC_USE_DEBUG)
818   PetscInt       i,zm,zn;
819 #endif
820   PetscInt       rows_l[2048],cols_l[2048];
821 
822   PetscFunctionBegin;
823 #if defined(PETSC_USE_DEBUG)
824   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
825   /* count negative indices */
826   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
827   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
828 #endif
829   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
830   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
831 #if defined(PETSC_USE_DEBUG)
832   /* count negative indices (should be the same as before) */
833   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
834   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
835   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
836   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
837 #endif
838   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "MatSetValuesBlocked_IS"
844 PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
845 {
846   Mat_IS         *is = (Mat_IS*)mat->data;
847   PetscErrorCode ierr;
848 #if defined(PETSC_USE_DEBUG)
849   PetscInt       i,zm,zn;
850 #endif
851   PetscInt       rows_l[2048],cols_l[2048];
852 
853   PetscFunctionBegin;
854 #if defined(PETSC_USE_DEBUG)
855   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
856   /* count negative indices */
857   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
858   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
859 #endif
860   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
861   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
862 #if defined(PETSC_USE_DEBUG)
863   /* count negative indices (should be the same as before) */
864   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
865   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
866   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
867   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
868 #endif
869   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
870   PetscFunctionReturn(0);
871 }
872 
873 #undef __FUNCT__
874 #define __FUNCT__ "MatSetValuesLocal_IS"
875 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
876 {
877   PetscErrorCode ierr;
878   Mat_IS         *is = (Mat_IS*)A->data;
879 
880   PetscFunctionBegin;
881   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 #undef __FUNCT__
886 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
887 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
888 {
889   PetscErrorCode ierr;
890   Mat_IS         *is = (Mat_IS*)A->data;
891 
892   PetscFunctionBegin;
893   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
894   PetscFunctionReturn(0);
895 }
896 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/*
   Zeroes the listed global rows. Because a global row may touch several
   local (subdomain) matrices, ownership information is broadcast through a
   PetscSF so that every process zeroes the corresponding rows of its local
   matrix. If x and b are given, the right-hand side is fixed to diag*x on
   the zeroed locally-owned rows.
*/
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  /* mark owned rows in rootdata, then broadcast the marks to every process
     whose local matrix contains those rows */
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* lrows is reallocated: it now holds LOCAL row indices of the local matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
937 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsLocal_Private"
/*
   Zeroes the given LOCAL rows of the local matrix. For a nonzero diag, the
   diagonal entry of a shared (interface) row is set to diag divided by the
   number of subdomains sharing that row, so that the assembled operator has
   exactly diag on that row.
*/
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (diag != 0.) {
    /*
       Set up is->y as a "counting vector": after the scatter round trip,
       is->y[i] holds the number of subdomains sharing local row i. This is
       needed for MatMult_IS to work properly on the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }

  if (!n) {
    /* no rows to zero on this subdomain: the local problem stays pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        /* divide by the sharing count so the assembled diagonal sums to diag */
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
982 
983 #undef __FUNCT__
984 #define __FUNCT__ "MatAssemblyBegin_IS"
985 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
986 {
987   Mat_IS         *is = (Mat_IS*)A->data;
988   PetscErrorCode ierr;
989 
990   PetscFunctionBegin;
991   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
992   PetscFunctionReturn(0);
993 }
994 
995 #undef __FUNCT__
996 #define __FUNCT__ "MatAssemblyEnd_IS"
997 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
998 {
999   Mat_IS         *is = (Mat_IS*)A->data;
1000   PetscErrorCode ierr;
1001 
1002   PetscFunctionBegin;
1003   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1004   PetscFunctionReturn(0);
1005 }
1006 
1007 #undef __FUNCT__
1008 #define __FUNCT__ "MatISGetLocalMat_IS"
1009 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1010 {
1011   Mat_IS *is = (Mat_IS*)mat->data;
1012 
1013   PetscFunctionBegin;
1014   *local = is->A;
1015   PetscFunctionReturn(0);
1016 }
1017 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    The returned matrix is a borrowed reference owned by the MATIS object.
  This can be called, for example, when you have precomputed the nonzero
  structure of the matrix and want to provide it to the inner matrix object
  to improve the performance of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1048 
1049 #undef __FUNCT__
1050 #define __FUNCT__ "MatISSetLocalMat_IS"
1051 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1052 {
1053   Mat_IS         *is = (Mat_IS*)mat->data;
1054   PetscInt       nrows,ncols,orows,ocols;
1055   PetscErrorCode ierr;
1056 
1057   PetscFunctionBegin;
1058   if (is->A) {
1059     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1060     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1061     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
1062   }
1063   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1064   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1065   is->A = local;
1066   PetscFunctionReturn(0);
1067 }
1068 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS. The MATIS object takes
  its own reference on local.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1098 
1099 #undef __FUNCT__
1100 #define __FUNCT__ "MatZeroEntries_IS"
1101 PetscErrorCode MatZeroEntries_IS(Mat A)
1102 {
1103   Mat_IS         *a = (Mat_IS*)A->data;
1104   PetscErrorCode ierr;
1105 
1106   PetscFunctionBegin;
1107   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1108   PetscFunctionReturn(0);
1109 }
1110 
1111 #undef __FUNCT__
1112 #define __FUNCT__ "MatScale_IS"
1113 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1114 {
1115   Mat_IS         *is = (Mat_IS*)A->data;
1116   PetscErrorCode ierr;
1117 
1118   PetscFunctionBegin;
1119   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 #undef __FUNCT__
1124 #define __FUNCT__ "MatGetDiagonal_IS"
1125 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1126 {
1127   Mat_IS         *is = (Mat_IS*)A->data;
1128   PetscErrorCode ierr;
1129 
1130   PetscFunctionBegin;
1131   /* get diagonal of the local matrix */
1132   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1133 
1134   /* scatter diagonal back into global vector */
1135   ierr = VecSet(v,0);CHKERRQ(ierr);
1136   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1137   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 #undef __FUNCT__
1142 #define __FUNCT__ "MatSetOption_IS"
1143 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1144 {
1145   Mat_IS         *a = (Mat_IS*)A->data;
1146   PetscErrorCode ierr;
1147 
1148   PetscFunctionBegin;
1149   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1150   PetscFunctionReturn(0);
1151 }
1152 
1153 #undef __FUNCT__
1154 #define __FUNCT__ "MatCreateIS"
1155 /*@
1156     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1157        process but not across processes.
1158 
1159    Input Parameters:
1160 +     comm    - MPI communicator that will share the matrix
1161 .     bs      - block size of the matrix
1162 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1163 .     rmap    - local to global map for rows
1164 -     cmap    - local to global map for cols
1165 
1166    Output Parameter:
1167 .    A - the resulting matrix
1168 
1169    Level: advanced
1170 
1171    Notes: See MATIS for more details.
1172           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1173           by that process. The sizes of rmap and cmap define the size of the local matrices.
1174           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1175 
1176 .seealso: MATIS, MatSetLocalToGlobalMapping()
1177 @*/
1178 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1179 {
1180   PetscErrorCode ierr;
1181 
1182   PetscFunctionBegin;
1183   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1184   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1185   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1186   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1187   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1188   if (rmap && cmap) {
1189     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1190   } else if (!rmap) {
1191     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1192   } else {
1193     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1194   }
1195   PetscFunctionReturn(0);
1196 }
1197 
1198 /*MC
1199    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1200    This stores the matrices in globally unassembled form. Each processor
1201    assembles only its local Neumann problem and the parallel matrix vector
1202    product is handled "implicitly".
1203 
1204    Operations Provided:
1205 +  MatMult()
1206 .  MatMultAdd()
1207 .  MatMultTranspose()
1208 .  MatMultTransposeAdd()
1209 .  MatZeroEntries()
1210 .  MatSetOption()
1211 .  MatZeroRows()
1212 .  MatSetValues()
1213 .  MatSetValuesBlocked()
1214 .  MatSetValuesLocal()
1215 .  MatSetValuesBlockedLocal()
1216 .  MatScale()
1217 .  MatGetDiagonal()
1218 .  MatMissingDiagonal()
1219 .  MatDuplicate()
1220 .  MatCopy()
1221 -  MatSetLocalToGlobalMapping()
1222 
1223    Options Database Keys:
1224 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1225 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1227 
1228           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1229 
1230           You can do matrix preallocation on the local matrix after you obtain it with
1231           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1232 
1233   Level: advanced
1234 
1235 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC, MatCreateIS()
1236 
1237 M*/
1238 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   Constructor for MATIS: allocates the per-matrix data, installs the
   function table, and composes the MATIS-specific methods.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table so unimplemented ops error out cleanly */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;

  /* special MATIS functions, dispatched by name via PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1285