xref: /petsc/src/mat/impls/is/matis.c (revision 0a40dfa3a44c361d47d0cfe46087c71944fd67f5)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4     This stores the matrices in globally unassembled form. Each processor
5     assembles only its local Neumann problem and the parallel matrix vector
6     product is handled "implicitly".
7 
8     Currently this allows for only one subdomain per processor.
9 */
10 
11 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
12 
13 static PetscErrorCode MatISComputeSF_Private(Mat);
14 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);
15 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/*
   MatGetLocalSubMatrix_IS - Returns a MATIS submatrix of A for the given
   local row and column index sets.

   The submatrix of the sequential local matrix is extracted first; then new
   local-to-global mappings for the submatrix are built by mapping the
   requested local indices through the parent mappings and renumbering the
   resulting global indices contiguously with ISRenumber().
*/
PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA,lsubmat;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is,isn;
  const PetscInt         *rg,*rl;
#if defined(PETSC_USE_DEBUG)
  PetscInt               nrg;
#endif
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* extract local submatrix (it will complain if row and col exceed the local sizes) */
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(lA,row,col,MAT_INITIAL_MATRIX,&lsubmat);CHKERRQ(ierr);
  /* compute new l2g map for rows */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %d -> %d greater than maximum possible %d\n",i,rl[i],nrg);
#endif
  /* map the selected local rows to global indices of A */
  ierr = PetscMalloc1(nrl,&idxs);CHKERRQ(ierr);
  for (i=0;i<nrl;i++) idxs[i] = rg[rl[i]];
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrl,idxs,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = PetscFree(idxs);CHKERRQ(ierr);
  /* renumber to a contiguous global numbering; M is the new global row count */
  ierr = ISRenumber(is,NULL,&M,&isn);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(isn,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&isn);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
#if defined(PETSC_USE_DEBUG)
    PetscInt       ncg;
#endif
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %d -> %d greater than maximum possible %d\n",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncl,&idxs);CHKERRQ(ierr);
    for (i=0;i<ncl;i++) idxs[i] = cg[cl[i]];
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncl,idxs,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
    ierr = PetscFree(idxs);CHKERRQ(ierr);
    ierr = ISRenumber(is,NULL,&N,&isn);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(isn,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&isn);CHKERRQ(ierr);
  } else { /* same rows/columns and same mappings: share the row mapping */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
    N = M;
  }
  /* create the MATIS submatrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lsubmat);CHKERRQ(ierr);
  ierr = MatDestroy(&lsubmat);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "MatCopy_IS"
99 PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
100 {
101   Mat_IS         *a = (Mat_IS*)A->data,*b;
102   PetscBool      ismatis;
103   PetscErrorCode ierr;
104 
105   PetscFunctionBegin;
106   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
107   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
108   b = (Mat_IS*)B->data;
109   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
110   PetscFunctionReturn(0);
111 }
112 
#undef __FUNCT__
#define __FUNCT__ "MatMissingDiagonal_IS"
/*
   MatMissingDiagonal_IS - Checks the process-local matrix for missing diagonal
   entries. Requesting the index d of a missing diagonal is not supported.

   NOTE(review): the check is delegated to the sequential local matrix and the
   flag is not reduced across the communicator, so *missing may differ between
   processes -- confirm callers expect a process-local answer.
*/
PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (d) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Need to be implemented");
  ierr = MatMissingDiagonal(a->A,missing,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
125 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the star forest used to move per-row data
   between the global (assembled) layout and the local (unassembled) one.

   Roots are the locally owned rows of B (B's row layout), leaves are the rows
   of the sequential local matrix; the graph is given by the row l2g mapping.
   Also allocates the sf_rootdata/sf_leafdata work arrays used by the
   preallocation routines.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: rows of the local matrix; roots: locally owned global rows */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
145 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e. 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e. 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation, if any (no-op otherwise) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
193 
194 #undef __FUNCT__
195 #define __FUNCT__ "MatISSetPreallocation_IS"
196 static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
197 {
198   Mat_IS         *matis = (Mat_IS*)(B->data);
199   PetscInt       bs,i,nlocalcols;
200   PetscErrorCode ierr;
201 
202   PetscFunctionBegin;
203   if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
204   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
205     ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
206   }
207   if (!d_nnz) {
208     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
209   } else {
210     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
211   }
212   if (!o_nnz) {
213     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
214   } else {
215     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
216   }
217   ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
218   ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
219   ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
220   ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
221   for (i=0;i<matis->sf_nleaves;i++) {
222     matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
223   }
224   ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
225   for (i=0;i<matis->sf_nleaves/bs;i++) {
226     matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
227   }
228   ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
229   for (i=0;i<matis->sf_nleaves/bs;i++) {
230     matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
231   }
232   ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
233   PetscFunctionReturn(0);
234 }
235 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   MatISSetMPIXAIJPreallocation_Private - Preallocates the parallel matrix B
   from the sparsity of the local (unassembled) matrices of the MATIS A.

   Each process counts, for every row of its local matrix, the contributions
   falling in the diagonal and off-diagonal blocks of B's parallel layout;
   the counts are then combined on the owning processes through the star
   forest with either MPI_SUM (safe upper bound on shared rows) or MPI_MAX
   (smaller bound), depending on maxreduce.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else { /* square case with shared mapping: reuse the row indices */
    global_indices_c = global_indices_r;
  }

  if (issbaij) { /* make MatGetRow return the full upper triangular part */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      /* dense is symmetric in structure: visit the upper triangle and count
         each (i,j) entry for row i and, when i != j, for row j as well */
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ rows only list the upper triangle: count the mirrored entry */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* rescale the point-row counts into block-row counts for (S)BAIJ */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
379 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MatISGetMPIXAIJ_IS - Implements MatISGetMPIXAIJ(): assembles the MATIS mat
   into a parallel AIJ/BAIJ/SBAIJ matrix M.

   Single-process runs reduce to a duplicate/copy of the local matrix.
   Otherwise a matrix of suitable type is created (or a compatible one reused
   and zeroed), preallocated via MatISSetMPIXAIJPreallocation_Private, and
   filled by adding the local matrices with MatSetValuesLocal.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) { /* uniprocessor: the local matrix IS the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type */
    /* SBAIJ only if every process has an SBAIJ local matrix */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* convert to BAIJ so MatGetRowIJ/MatGetRow below see the full rows */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* insert the whole dense block at once using identity index arrays */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense arrays are column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* fast path: insert row by row from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) { /* restore the default orientation */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
513 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameter:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, the output must be a valid, distinct matrix on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the type-specific implementation (errors if not composed) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "MatDuplicate_IS"
551 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
552 {
553   PetscErrorCode ierr;
554   Mat_IS         *matis = (Mat_IS*)(mat->data);
555   PetscInt       bs,m,n,M,N;
556   Mat            B,localmat;
557 
558   PetscFunctionBegin;
559   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
560   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
561   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
562   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
563   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
564   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
565   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
566   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
567   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
568   *newmat = B;
569   PetscFunctionReturn(0);
570 }
571 
572 #undef __FUNCT__
573 #define __FUNCT__ "MatIsHermitian_IS"
574 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
575 {
576   PetscErrorCode ierr;
577   Mat_IS         *matis = (Mat_IS*)A->data;
578   PetscBool      local_sym;
579 
580   PetscFunctionBegin;
581   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
582   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
583   PetscFunctionReturn(0);
584 }
585 
586 #undef __FUNCT__
587 #define __FUNCT__ "MatIsSymmetric_IS"
588 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
589 {
590   PetscErrorCode ierr;
591   Mat_IS         *matis = (Mat_IS*)A->data;
592   PetscBool      local_sym;
593 
594   PetscFunctionBegin;
595   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
596   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "MatDestroy_IS"
602 PetscErrorCode MatDestroy_IS(Mat A)
603 {
604   PetscErrorCode ierr;
605   Mat_IS         *b = (Mat_IS*)A->data;
606 
607   PetscFunctionBegin;
608   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
609   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
610   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
611   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
612   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
613   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
614   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
615   ierr = PetscFree(A->data);CHKERRQ(ierr);
616   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
617   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
618   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
619   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
620   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
621   PetscFunctionReturn(0);
622 }
623 
624 #undef __FUNCT__
625 #define __FUNCT__ "MatMult_IS"
626 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
627 {
628   PetscErrorCode ierr;
629   Mat_IS         *is  = (Mat_IS*)A->data;
630   PetscScalar    zero = 0.0;
631 
632   PetscFunctionBegin;
633   /*  scatter the global vector x into the local work vector */
634   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
635   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
636 
637   /* multiply the local matrix */
638   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
639 
640   /* scatter product back into global memory */
641   ierr = VecSet(y,zero);CHKERRQ(ierr);
642   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
643   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
644   PetscFunctionReturn(0);
645 }
646 
647 #undef __FUNCT__
648 #define __FUNCT__ "MatMultAdd_IS"
649 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
650 {
651   Vec            temp_vec;
652   PetscErrorCode ierr;
653 
654   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
655   if (v3 != v2) {
656     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
657     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
658   } else {
659     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
660     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
661     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
662     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
663     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
664   }
665   PetscFunctionReturn(0);
666 }
667 
668 #undef __FUNCT__
669 #define __FUNCT__ "MatMultTranspose_IS"
670 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
671 {
672   Mat_IS         *is = (Mat_IS*)A->data;
673   PetscErrorCode ierr;
674 
675   PetscFunctionBegin;
676   /*  scatter the global vector x into the local work vector */
677   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
678   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
679 
680   /* multiply the local matrix */
681   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
682 
683   /* scatter product back into global vector */
684   ierr = VecSet(x,0);CHKERRQ(ierr);
685   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
686   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
687   PetscFunctionReturn(0);
688 }
689 
690 #undef __FUNCT__
691 #define __FUNCT__ "MatMultTransposeAdd_IS"
692 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
693 {
694   Vec            temp_vec;
695   PetscErrorCode ierr;
696 
697   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
698   if (v3 != v2) {
699     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
700     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
701   } else {
702     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
703     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
704     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
705     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
706     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
707   }
708   PetscFunctionReturn(0);
709 }
710 
711 #undef __FUNCT__
712 #define __FUNCT__ "MatView_IS"
713 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
714 {
715   Mat_IS         *a = (Mat_IS*)A->data;
716   PetscErrorCode ierr;
717   PetscViewer    sviewer;
718 
719   PetscFunctionBegin;
720   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
721   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
722   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
723   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
724   PetscFunctionReturn(0);
725 }
726 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   Installs the row/column local-to-global maps on a MATIS matrix.  Any
   previously built local objects are destroyed; the sequential local matrix
   is->A is created with sizes given by the number of local indices in each
   map, along with the local work vectors and the scatter contexts used by
   the matrix-vector product routines.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data (local vectors, scatters, local matrix, SF) */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential matrix sized by the number of
     local indices (NOT by the owned part of the global layout) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* local matrix options are customizable via the -is_ prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local index i corresponds to the
     global index obtained by applying the local-to-global map to i */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    /* distinct column map: build a separate column scatter */
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same map for rows and columns: share the row scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
793 
794 #undef __FUNCT__
795 #define __FUNCT__ "MatSetValues_IS"
796 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
797 {
798   Mat_IS         *is = (Mat_IS*)mat->data;
799   PetscErrorCode ierr;
800 #if defined(PETSC_USE_DEBUG)
801   PetscInt       i,zm,zn;
802 #endif
803   PetscInt       rows_l[2048],cols_l[2048];
804 
805   PetscFunctionBegin;
806 #if defined(PETSC_USE_DEBUG)
807   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
808   /* count negative indices */
809   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
810   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
811 #endif
812   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
813   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
814 #if defined(PETSC_USE_DEBUG)
815   /* count negative indices (should be the same as before) */
816   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
817   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
818   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
819   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
820 #endif
821   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
822   PetscFunctionReturn(0);
823 }
824 
825 #undef __FUNCT__
826 #define __FUNCT__ "MatSetValuesBlocked_IS"
827 PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
828 {
829   Mat_IS         *is = (Mat_IS*)mat->data;
830   PetscErrorCode ierr;
831 #if defined(PETSC_USE_DEBUG)
832   PetscInt       i,zm,zn;
833 #endif
834   PetscInt       rows_l[2048],cols_l[2048];
835 
836   PetscFunctionBegin;
837 #if defined(PETSC_USE_DEBUG)
838   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
839   /* count negative indices */
840   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
841   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
842 #endif
843   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
844   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
845 #if defined(PETSC_USE_DEBUG)
846   /* count negative indices (should be the same as before) */
847   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
848   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
849   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
850   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
851 #endif
852   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "MatSetValuesLocal_IS"
858 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
859 {
860   PetscErrorCode ierr;
861   Mat_IS         *is = (Mat_IS*)A->data;
862 
863   PetscFunctionBegin;
864   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
865   PetscFunctionReturn(0);
866 }
867 
868 #undef __FUNCT__
869 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
870 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
871 {
872   PetscErrorCode ierr;
873   Mat_IS         *is = (Mat_IS*)A->data;
874 
875   PetscFunctionBegin;
876   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
877   PetscFunctionReturn(0);
878 }
879 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/*
   Zeroes the given global rows of a MATIS matrix. A global row may be shared
   by several subdomains, so the marks on locally owned rows are broadcast
   through a star forest and every process then zeroes the corresponding rows
   of its own local matrix.
*/
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i; /* nl: local matrix rows; len: owned rows to zero; nr: local rows to zero */
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed: b[i] = diag*x[i] on each zeroed owned row */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  /* clear root (owned rows) and leaf (local rows) buffers before marking */
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark the owned rows to be zeroed... */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  /* ...and broadcast the marks to every process whose subdomain shares them */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the marked local row numbers and zero them in the local matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
920 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsLocal_Private"
/*
   Zeroes the given rows of the local matrix. When a nonzero diagonal value is
   requested, the value placed on each local diagonal entry is divided by the
   number of subdomains sharing that row, so that the globally assembled
   operator ends up with exactly diag on the zeroed rows.
*/
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (diag != 0.) {
    /*
       Set up is->y as a "counting vector": scattering a vector of ones in
       reverse with ADD_VALUES makes counter[i] the number of local copies
       (subdomains) sharing global dof i; the forward scatter then brings
       that multiplicity back to each local dof. This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }

  if (!n) {
    /* no local rows to zero: flag the subdomain problem as pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows first, then overwrite the diagonal with the value
       scaled by the multiplicity stored in is->y */
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
965 
966 #undef __FUNCT__
967 #define __FUNCT__ "MatAssemblyBegin_IS"
968 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
969 {
970   Mat_IS         *is = (Mat_IS*)A->data;
971   PetscErrorCode ierr;
972 
973   PetscFunctionBegin;
974   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
975   PetscFunctionReturn(0);
976 }
977 
978 #undef __FUNCT__
979 #define __FUNCT__ "MatAssemblyEnd_IS"
980 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
981 {
982   Mat_IS         *is = (Mat_IS*)A->data;
983   PetscErrorCode ierr;
984 
985   PetscFunctionBegin;
986   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
987   PetscFunctionReturn(0);
988 }
989 
990 #undef __FUNCT__
991 #define __FUNCT__ "MatISGetLocalMat_IS"
992 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
993 {
994   Mat_IS *is = (Mat_IS*)mat->data;
995 
996   PetscFunctionBegin;
997   *local = is->A;
998   PetscFunctionReturn(0);
999 }
1000 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    The returned matrix is not reference counted by this routine: do not
  destroy it.

    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1031 
1032 #undef __FUNCT__
1033 #define __FUNCT__ "MatISSetLocalMat_IS"
1034 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1035 {
1036   Mat_IS         *is = (Mat_IS*)mat->data;
1037   PetscInt       nrows,ncols,orows,ocols;
1038   PetscErrorCode ierr;
1039 
1040   PetscFunctionBegin;
1041   if (is->A) {
1042     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1043     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1044     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
1045   }
1046   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1047   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1048   is->A = local;
1049   PetscFunctionReturn(0);
1050 }
1051 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The routine takes its own reference on local; the caller retains ownership
  of its reference.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1081 
1082 #undef __FUNCT__
1083 #define __FUNCT__ "MatZeroEntries_IS"
1084 PetscErrorCode MatZeroEntries_IS(Mat A)
1085 {
1086   Mat_IS         *a = (Mat_IS*)A->data;
1087   PetscErrorCode ierr;
1088 
1089   PetscFunctionBegin;
1090   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1091   PetscFunctionReturn(0);
1092 }
1093 
1094 #undef __FUNCT__
1095 #define __FUNCT__ "MatScale_IS"
1096 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1097 {
1098   Mat_IS         *is = (Mat_IS*)A->data;
1099   PetscErrorCode ierr;
1100 
1101   PetscFunctionBegin;
1102   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1103   PetscFunctionReturn(0);
1104 }
1105 
1106 #undef __FUNCT__
1107 #define __FUNCT__ "MatGetDiagonal_IS"
1108 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1109 {
1110   Mat_IS         *is = (Mat_IS*)A->data;
1111   PetscErrorCode ierr;
1112 
1113   PetscFunctionBegin;
1114   /* get diagonal of the local matrix */
1115   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1116 
1117   /* scatter diagonal back into global vector */
1118   ierr = VecSet(v,0);CHKERRQ(ierr);
1119   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1120   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1121   PetscFunctionReturn(0);
1122 }
1123 
1124 #undef __FUNCT__
1125 #define __FUNCT__ "MatSetOption_IS"
1126 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1127 {
1128   Mat_IS         *a = (Mat_IS*)A->data;
1129   PetscErrorCode ierr;
1130 
1131   PetscFunctionBegin;
1132   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1133   PetscFunctionReturn(0);
1134 }
1135 
1136 #undef __FUNCT__
1137 #define __FUNCT__ "MatCreateIS"
1138 /*@
1139     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1140        process but not across processes.
1141 
1142    Input Parameters:
1143 +     comm    - MPI communicator that will share the matrix
1144 .     bs      - block size of the matrix
1145 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1146 .     rmap    - local to global map for rows
1147 -     cmap    - local to global map for cols
1148 
1149    Output Parameter:
1150 .    A - the resulting matrix
1151 
1152    Level: advanced
1153 
1154    Notes: See MATIS for more details.
1155           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1156           by that process. The sizes of rmap and cmap define the size of the local matrices.
1157           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1158 
1159 .seealso: MATIS, MatSetLocalToGlobalMapping()
1160 @*/
1161 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1162 {
1163   PetscErrorCode ierr;
1164 
1165   PetscFunctionBegin;
1166   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1167   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1168   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1169   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1170   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1171   if (rmap && cmap) {
1172     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1173   } else if (!rmap) {
1174     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1175   } else {
1176     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1177   }
1178   PetscFunctionReturn(0);
1179 }
1180 
1181 /*MC
1182    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1183    This stores the matrices in globally unassembled form. Each processor
1184    assembles only its local Neumann problem and the parallel matrix vector
1185    product is handled "implicitly".
1186 
1187    Operations Provided:
1188 +  MatMult()
1189 .  MatMultAdd()
1190 .  MatMultTranspose()
1191 .  MatMultTransposeAdd()
1192 .  MatZeroEntries()
1193 .  MatSetOption()
1194 .  MatZeroRows()
1195 .  MatSetValues()
1196 .  MatSetValuesBlocked()
1197 .  MatSetValuesLocal()
1198 .  MatSetValuesBlockedLocal()
1199 .  MatScale()
1200 .  MatGetDiagonal()
1201 .  MatMissingDiagonal()
1202 .  MatDuplicate()
1203 .  MatCopy()
1204 -  MatSetLocalToGlobalMapping()
1205 
1206    Options Database Keys:
1207 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1208 
   Notes: Options prefix for the inner matrix is given by -is_mat_xxx
1210 
1211           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1212 
1213           You can do matrix preallocation on the local matrix after you obtain it with
1214           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1215 
1216   Level: advanced
1217 
1218 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC, MatCreateIS()
1219 
1220 M*/
1221 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   Constructor for the MATIS matrix type: allocates the Mat_IS data structure,
   fills the function table with the MATIS implementations, and composes the
   type-specific routines (get/set local matrix, conversion, preallocation).
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table so unsupported operations fail cleanly */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;

  /* special MATIS functions, reachable via PetscUseMethod on the object */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1268