xref: /petsc/src/mat/impls/is/matis.c (revision 6989cf23719da283ffb2f2b046f65cb16d97424c)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 #include <../src/mat/impls/aij/mpi/mpiaij.h>
12 #include <petsc/private/sfimpl.h>
13 
14 #define MATIS_MAX_ENTRIES_INSERTION 2048
15 
#undef __FUNCT__
#define __FUNCT__ "MatISContainerDestroy_Private"
/* Destroy callback for the PetscContainers attached in MatConvert_MPIAIJ_IS:
   frees the raw array (merged CSR indices or values) stored in the container */
static PetscErrorCode MatISContainerDestroy_Private(void *ptr)
{
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = PetscFree(ptr); CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
26 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_MPIAIJ_IS"
/*
   MatConvert_MPIAIJ_IS - Converts a MATMPIAIJ matrix into MATIS format.

   The local matrix of the resulting MATIS is a sequential AIJ matrix obtained
   by merging, row by row, the diagonal (aij->A) and off-diagonal (aij->B)
   blocks of the MPIAIJ matrix.  The row l2g map is the ownership range of A;
   the column l2g map lists the owned columns first, followed by the ghost
   columns (aij->garray).

   Note: reuse is currently not honored here; a new matrix is always created
   in newmat (TODO confirm against MatConvert dispatch expectations).
*/
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_IS(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat_MPIAIJ             *aij  = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ             *diag = (Mat_SeqAIJ*)(aij->A->data);
  Mat_SeqAIJ             *offd = (Mat_SeqAIJ*)(aij->B->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  MPI_Comm               comm;
  void                   *ptrs[2];
  const char             *names[2] = {"_convert_csr_aux","_convert_csr_data"};
  PetscScalar            *dd,*od,*aa,*data;
  PetscInt               *di,*dj,*oi,*oj;
  PetscInt               *aux,*ii,*jj;
  PetscInt               dr,dc,oc,str,stc,nnz,i,jd,jo,cum;
  PetscErrorCode         ierr;

  PetscFunctionBeginUser;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  if (!aij->garray) SETERRQ(comm,PETSC_ERR_SUP,"garray not present");

  /* access relevant information from MPIAIJ: CSR triples of the diagonal
     (di,dj,dd) and off-diagonal (oi,oj,od) blocks, local sizes and starts */
  ierr = MatGetOwnershipRange(A,&str,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRangeColumn(A,&stc,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&dr,&dc);CHKERRQ(ierr);
  di   = diag->i;
  dj   = diag->j;
  dd   = diag->a;
  oc   = aij->B->cmap->n;
  oi   = offd->i;
  oj   = offd->j;
  od   = offd->a;
  nnz  = diag->i[dr] + offd->i[dr];

  /* generate l2g maps for rows and cols */
  ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* owned columns first, then the ghost columns in garray */
  ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
  for (i=0; i<dc; i++) aux[i]    = i+stc;
  for (i=0; i<oc; i++) aux[i+dc] = aij->garray[i];
  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  /* create MATIS object */
  ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
  ierr = MatSetSizes(*newmat,dr,dc,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

  /* merge local matrices: one buffer holds the merged row pointers (ii, dr+1
     entries) followed by the merged column indices (jj, nnz entries); values
     go into data.  The two source CSRs are walked in lockstep; off-diagonal
     column indices are shifted by dc since ghost columns are numbered after
     the owned ones in the column l2g map built above */
  ierr = PetscMalloc1(nnz+dr+1,&aux);CHKERRQ(ierr);
  ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  ii   = aux;
  jj   = aux+dr+1;
  aa   = data;
  *ii  = *(di++) + *(oi++);
  for (jd=0,jo=0,cum=0;*ii<nnz;cum++)
  {
     for (;jd<*di;jd++) { *jj++ = *dj++;      *aa++ = *dd++; }
     for (;jo<*oi;jo++) { *jj++ = *oj++ + dc; *aa++ = *od++; }
     *(++ii) = *(di++) + *(oi++);
  }
  /* remaining rows (if any) are empty: close their row pointers at nnz */
  for (;cum<dr;cum++) *(++ii) = nnz;
  ii   = aux;
  jj   = aux+dr+1;
  aa   = data;
  /* lA does not copy the arrays, hence the containers below keep them alive */
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);CHKERRQ(ierr);

  /* create containers to destroy the data when lA is destroyed */
  ptrs[0] = aux;
  ptrs[1] = data;
  for (i=0; i<2; i++) {
    PetscContainer c;

    ierr = PetscContainerCreate(PETSC_COMM_SELF,&c);CHKERRQ(ierr);
    ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
    ierr = PetscContainerSetUserDestroy(c,MatISContainerDestroy_Private);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);CHKERRQ(ierr);
    ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
  }

  /* finalize matrix */
  ierr = MatISSetLocalMat(*newmat,lA);CHKERRQ(ierr);
  ierr = MatDestroy(&lA);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
121 
#undef __FUNCT__
#define __FUNCT__ "MatISSetUpSF"
/*@
   MatISSetUpSF - Setup star forest objects used by MatIS.

   Collective on MPI_Comm

   Input Parameter:
.  A - the matrix

   Level: advanced

   Notes: This function does not need to be called by the user.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetUpSF(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatches to MatISSetUpSF_IS when A is of type MATIS; no-op otherwise */
  ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
150 
151 #undef __FUNCT__
152 #define __FUNCT__ "MatDiagonalSet_IS"
153 PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
154 {
155   Mat_IS         *is = (Mat_IS*)A->data;
156   PetscErrorCode ierr;
157 
158   PetscFunctionBegin;
159   if (!D) { /* this code branch is used by MatShift_IS */
160     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
161   } else {
162     ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
163     ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
164   }
165   ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr);
166   ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr);
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "MatShift_IS"
172 PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
173 {
174   PetscErrorCode ierr;
175 
176   PetscFunctionBegin;
177   ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr);
178   PetscFunctionReturn(0);
179 }
180 
181 #undef __FUNCT__
182 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
183 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
184 {
185   PetscErrorCode ierr;
186   Mat_IS         *is = (Mat_IS*)A->data;
187   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
188 
189   PetscFunctionBegin;
190 #if defined(PETSC_USE_DEBUG)
191   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
192 #endif
193   ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
194   ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
195   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
201 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
202 {
203   PetscErrorCode ierr;
204   Mat_IS         *is = (Mat_IS*)A->data;
205   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
206 
207   PetscFunctionBegin;
208 #if defined(PETSC_USE_DEBUG)
209   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
210 #endif
211   ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
212   ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
213   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
214   PetscFunctionReturn(0);
215 }
216 
#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/*
   PetscLayoutMapLocal_Private - Given a list of global indices, determines
   which locally owned entries of the layout are referenced by any process,
   and optionally communicates a new contiguous global numbering for them.

   Input Parameters:
+  map  - the layout describing the parallel ownership ranges
.  N    - number of global indices supplied by this process
-  idxs - the global indices (each must lie in [0,map->N))

   Output Parameters:
+  on     - number of locally owned entries referenced by some process (optional)
.  oidxs  - local positions (w.r.t. the layout) of those entries; caller frees
            with PetscFree() (optional)
-  ogidxs - for each such entry, its position in the new global numbering
            induced by concatenating the selections of all processes; caller
            frees with PetscFree() (optional)
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks an owned entry that no process references */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* logical OR flips the -1 markers to a nonnegative value exactly on the
     roots that receive at least one leaf contribution */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* exclusive prefix sum over the per-process selection counts gives the
       offset of this process' first index in the new global numbering */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
275 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   MatGetSubMatrix_IS - Extracts a parallel submatrix of a MATIS matrix.

   The global row/column index sets are first mapped to their owners in the
   layouts (PetscLayoutMapLocal_Private), then broadcast through the star
   forests to the subdomains; each subdomain extracts the corresponding local
   submatrix.  The local index sets used for the extraction are cached in the
   new matrix (getsub_ris/getsub_cis) to support MAT_REUSE_MATRIX.

   Note: irow and icol must not contain duplicate entries (checked only in
   debug builds).
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* reuse requires a MATIS created by a previous MAT_INITIAL_MATRIX call,
       which cached the local extraction index sets */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count how many times each global row/column is selected by summing ones
     into a vector: any entry different from 0 or 1 reveals a duplicate */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    /* broadcast the selection (new global id + 1, so 0 means unselected)
       from owners to the subdomains through the row SF */
    ierr = MatISSetUpSF(mat);CHKERRQ(ierr);
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf->nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    /* gather the selected local dofs and their new global numbering */
    for (i=0,newloc=0;i<matis->sf->nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf->nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1;
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* symmetric selection: reuse the row mapping and IS for the columns */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* same broadcast-and-gather as above, using the column SF */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf->nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf->nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf->nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the cached local index sets */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
425 
426 #undef __FUNCT__
427 #define __FUNCT__ "MatCopy_IS"
428 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
429 {
430   Mat_IS         *a = (Mat_IS*)A->data,*b;
431   PetscBool      ismatis;
432   PetscErrorCode ierr;
433 
434   PetscFunctionBegin;
435   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
436   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
437   b = (Mat_IS*)B->data;
438   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
439   PetscFunctionReturn(0);
440 }
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "MatMissingDiagonal_IS"
444 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
445 {
446   Vec               v;
447   const PetscScalar *array;
448   PetscInt          i,n;
449   PetscErrorCode    ierr;
450 
451   PetscFunctionBegin;
452   *missing = PETSC_FALSE;
453   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
454   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
455   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
456   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
457   for (i=0;i<n;i++) if (array[i] == 0.) break;
458   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
459   ierr = VecDestroy(&v);CHKERRQ(ierr);
460   if (i != n) *missing = PETSC_TRUE;
461   if (d) {
462     *d = -1;
463     if (*missing) {
464       PetscInt rstart;
465       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
466       *d = i+rstart;
467     }
468   }
469   PetscFunctionReturn(0);
470 }
471 
#undef __FUNCT__
#define __FUNCT__ "MatISSetUpSF_IS"
/*
   MatISSetUpSF_IS - Builds the star forests connecting the global row/column
   layouts (roots) with the local dofs of each subdomain (leaves), together
   with the work buffers sf_rootdata/sf_leafdata used by the MATIS routines.

   When the row and column mappings coincide, the column SF and its buffers
   alias the row ones (csf == sf); the destroy routine is expected to account
   for this aliasing when freeing them.  Idempotent: returns immediately if
   the SF has already been set up.
*/
static PetscErrorCode MatISSetUpSF_IS(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscInt       nleaves;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (matis->sf) PetscFunctionReturn(0);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(B->rmap->mapping,&nleaves);CHKERRQ(ierr);
  /* ilocal is NULL (leaves are contiguous), so the ownership mode applies to
     nothing here -- TODO confirm PETSC_OWN_POINTER is intended over COPY */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf->nroots,&matis->sf_rootdata,matis->sf->nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = ISLocalToGlobalMappingGetSize(B->cmap->mapping,&nleaves);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf->nroots,&matis->csf_rootdata,matis->csf->nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* identical mappings: alias the row SF and its buffers */
    matis->csf = matis->sf;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
503 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatches to MatISSetPreallocation_IS when B is of type MATIS; no-op otherwise */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
551 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MatISSetPreallocation_IS - Implements MatISSetPreallocation for MATIS.

   The per-row estimates d_nnz/o_nnz (or the constants d_nz/o_nz) are summed
   at the owning processes and broadcast through the star forest to the
   subdomains, yielding an (over)estimate of the nonzeros of each local row.
   All three sequential preallocation routines (AIJ, BAIJ, SBAIJ) are called;
   each is a no-op unless it matches the actual type of the local matrix.
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  ierr = MatISSetUpSF(B);CHKERRQ(ierr);

  /* rootdata = diagonal + off-diagonal estimate per owned row */
  if (!d_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nnz[i];

  if (!o_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nnz[i];

  /* local queries are interleaved to overlap with the SF communication */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);

  /* AIJ: clip each row estimate at the number of local columns */
  for (i=0;i<matis->sf->nleaves;i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* BAIJ: reuse leafdata in place, converting point estimates to block rows */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* SBAIJ: only the upper triangular part is stored, so subtract the block row index */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
585 
586 #undef __FUNCT__
587 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
588 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
589 {
590   Mat_IS          *matis = (Mat_IS*)(A->data);
591   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
592   const PetscInt  *global_indices_r,*global_indices_c;
593   PetscInt        i,j,bs,rows,cols;
594   PetscInt        lrows,lcols;
595   PetscInt        local_rows,local_cols;
596   PetscMPIInt     nsubdomains;
597   PetscBool       isdense,issbaij;
598   PetscErrorCode  ierr;
599 
600   PetscFunctionBegin;
601   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
602   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
603   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
604   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
605   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
606   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
607   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
608   if (A->rmap->mapping != A->cmap->mapping) {
609     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
610   } else {
611     global_indices_c = global_indices_r;
612   }
613 
614   if (issbaij) {
615     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
616   }
617   /*
618      An SF reduce is needed to sum up properly on shared rows.
619      Note that generally preallocation is not exact, since it overestimates nonzeros
620   */
621   ierr = MatISSetUpSF(A);CHKERRQ(ierr);
622   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
623   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
624   /* All processes need to compute entire row ownership */
625   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
626   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
627   for (i=0;i<nsubdomains;i++) {
628     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
629       row_ownership[j] = i;
630     }
631   }
632   ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
633 
634   /*
635      my_dnz and my_onz contains exact contribution to preallocation from each local mat
636      then, they will be summed up properly. This way, preallocation is always sufficient
637   */
638   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
639   /* preallocation as a MATAIJ */
640   if (isdense) { /* special case for dense local matrices */
641     for (i=0;i<local_rows;i++) {
642       PetscInt index_row = global_indices_r[i];
643       for (j=i;j<local_rows;j++) {
644         PetscInt owner = row_ownership[index_row];
645         PetscInt index_col = global_indices_c[j];
646         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
647           my_dnz[i] += 1;
648         } else { /* offdiag block */
649           my_onz[i] += 1;
650         }
651         /* same as before, interchanging rows and cols */
652         if (i != j) {
653           owner = row_ownership[index_col];
654           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
655             my_dnz[j] += 1;
656           } else {
657             my_onz[j] += 1;
658           }
659         }
660       }
661     }
662   } else if (matis->A->ops->getrowij) {
663     const PetscInt *ii,*jj,*jptr;
664     PetscBool      done;
665     ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
666     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
667     jptr = jj;
668     for (i=0;i<local_rows;i++) {
669       PetscInt index_row = global_indices_r[i];
670       for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
671         PetscInt owner = row_ownership[index_row];
672         PetscInt index_col = global_indices_c[*jptr];
673         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
674           my_dnz[i] += 1;
675         } else { /* offdiag block */
676           my_onz[i] += 1;
677         }
678         /* same as before, interchanging rows and cols */
679         if (issbaij && index_col != index_row) {
680           owner = row_ownership[index_col];
681           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
682             my_dnz[*jptr] += 1;
683           } else {
684             my_onz[*jptr] += 1;
685           }
686         }
687       }
688     }
689     ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
690     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
691   } else { /* loop over rows and use MatGetRow */
692     for (i=0;i<local_rows;i++) {
693       const PetscInt *cols;
694       PetscInt       ncols,index_row = global_indices_r[i];
695       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
696       for (j=0;j<ncols;j++) {
697         PetscInt owner = row_ownership[index_row];
698         PetscInt index_col = global_indices_c[cols[j]];
699         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
700           my_dnz[i] += 1;
701         } else { /* offdiag block */
702           my_onz[i] += 1;
703         }
704         /* same as before, interchanging rows and cols */
705         if (issbaij && index_col != index_row) {
706           owner = row_ownership[index_col];
707           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
708             my_dnz[cols[j]] += 1;
709           } else {
710             my_onz[cols[j]] += 1;
711           }
712         }
713       }
714       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
715     }
716   }
717   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
718   if (global_indices_c != global_indices_r) {
719     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
720   }
721   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
722 
723   /* Reduce my_dnz and my_onz */
724   if (maxreduce) {
725     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
726     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
727     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
728     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
729   } else {
730     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
731     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
732     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
733     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
734   }
735   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
736 
737   /* Resize preallocation if overestimated */
738   for (i=0;i<lrows;i++) {
739     dnz[i] = PetscMin(dnz[i],lcols);
740     onz[i] = PetscMin(onz[i],cols-lcols);
741   }
742   /* set preallocation */
743   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
744   for (i=0;i<lrows/bs;i++) {
745     dnz[i] = dnz[i*bs]/bs;
746     onz[i] = onz[i*bs]/bs;
747   }
748   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
749   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
750   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
751   if (issbaij) {
752     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
753   }
754   PetscFunctionReturn(0);
755 }
756 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   Assembles the globally unassembled MATIS operator into a parallel matrix
   (AIJ, or SBAIJ when the local matrices are SEQSBAIJ) by adding each
   subdomain contribution through MatSetValuesLocal.

   mat   - the MATIS matrix
   reuse - MAT_INITIAL_MATRIX to create *M, MAT_REUSE_MATRIX to refill it
   M     - the assembled matrix

   Only SEQDENSE, SEQAIJ, SEQBAIJ and SEQSBAIJ local matrices are supported.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isseqdense,isseqsbaij,isseqaij,isseqbaij;
#if defined (PETSC_USE_DEBUG)
  PetscBool      lb[4],bb[4];
#endif
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* uniprocessor case: the local matrix already is the assembled operator */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isseqdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQBAIJ,&isseqbaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&isseqsbaij);CHKERRQ(ierr);
  if (!isseqdense && !isseqaij && !isseqbaij && !isseqsbaij) SETERRQ1(PETSC_COMM_SSELF,PETSC_ERR_SUP,"Not for matrix type %s",((PetscObject)(matis->A))->type_name);
#if defined (PETSC_USE_DEBUG)
  /* in debug mode, verify all processes agree on the local matrix type */
  lb[0] = isseqdense;
  lb[1] = isseqaij;
  lb[2] = isseqbaij;
  lb[3] = isseqsbaij;
  ierr = MPIU_Allreduce(lb,bb,4,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  if (!bb[0] && !bb[1] && !bb[2] && !bb[3]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Local matrices must have the same type");
#endif

  if (reuse == MAT_INITIAL_MATRIX) {
    /* create the assembled matrix and compute its preallocation */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    if (!isseqsbaij) {
      ierr = MatSetType(*M,MATAIJ);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*M,MATSBAIJ);CHKERRQ(ierr);
    }
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (isseqsbaij) {
    /* convert to BAIJ so the generic MatGetRow-based insertion below can be used */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isseqdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* insert the whole dense block in one column-oriented call; the index
       arrays are simply 0..local_rows-1 / 0..local_cols-1 */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* AIJ: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* the possibly converted local matrix can be released while messages travel */
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isseqdense) {
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
892 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Collective on Mat

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the implementation composed on the matrix (MatISGetMPIXAIJ_IS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "MatDuplicate_IS"
930 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
931 {
932   PetscErrorCode ierr;
933   Mat_IS         *matis = (Mat_IS*)(mat->data);
934   PetscInt       bs,m,n,M,N;
935   Mat            B,localmat;
936 
937   PetscFunctionBegin;
938   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
939   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
940   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
941   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
942   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
943   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
944   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
945   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
946   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
947   *newmat = B;
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "MatIsHermitian_IS"
953 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
954 {
955   PetscErrorCode ierr;
956   Mat_IS         *matis = (Mat_IS*)A->data;
957   PetscBool      local_sym;
958 
959   PetscFunctionBegin;
960   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
961   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "MatIsSymmetric_IS"
967 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
968 {
969   PetscErrorCode ierr;
970   Mat_IS         *matis = (Mat_IS*)A->data;
971   PetscBool      local_sym;
972 
973   PetscFunctionBegin;
974   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
975   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
976   PetscFunctionReturn(0);
977 }
978 
979 #undef __FUNCT__
980 #define __FUNCT__ "MatIsStructurallySymmetric_IS"
981 static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A,PetscBool  *flg)
982 {
983   PetscErrorCode ierr;
984   Mat_IS         *matis = (Mat_IS*)A->data;
985   PetscBool      local_sym;
986 
987   PetscFunctionBegin;
988   if (A->rmap->mapping != A->cmap->mapping) {
989     *flg = PETSC_FALSE;
990     PetscFunctionReturn(0);
991   }
992   ierr = MatIsStructurallySymmetric(matis->A,&local_sym);CHKERRQ(ierr);
993   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
994   PetscFunctionReturn(0);
995 }
996 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Destroys a MATIS matrix: releases the local matrix, scatters, work vectors,
   star forests and their buffers, and detaches the composed methods. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* csf may alias sf: destroy it and its buffers only when distinct */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* remove the function pointers composed at creation time */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1028 
1029 #undef __FUNCT__
1030 #define __FUNCT__ "MatMult_IS"
1031 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
1032 {
1033   PetscErrorCode ierr;
1034   Mat_IS         *is  = (Mat_IS*)A->data;
1035   PetscScalar    zero = 0.0;
1036 
1037   PetscFunctionBegin;
1038   /*  scatter the global vector x into the local work vector */
1039   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1040   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1041 
1042   /* multiply the local matrix */
1043   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
1044 
1045   /* scatter product back into global memory */
1046   ierr = VecSet(y,zero);CHKERRQ(ierr);
1047   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1048   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1049   PetscFunctionReturn(0);
1050 }
1051 
1052 #undef __FUNCT__
1053 #define __FUNCT__ "MatMultAdd_IS"
1054 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1055 {
1056   Vec            temp_vec;
1057   PetscErrorCode ierr;
1058 
1059   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
1060   if (v3 != v2) {
1061     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
1062     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1063   } else {
1064     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1065     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
1066     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1067     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1068     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1069   }
1070   PetscFunctionReturn(0);
1071 }
1072 
1073 #undef __FUNCT__
1074 #define __FUNCT__ "MatMultTranspose_IS"
1075 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
1076 {
1077   Mat_IS         *is = (Mat_IS*)A->data;
1078   PetscErrorCode ierr;
1079 
1080   PetscFunctionBegin;
1081   /*  scatter the global vector x into the local work vector */
1082   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1083   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1084 
1085   /* multiply the local matrix */
1086   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
1087 
1088   /* scatter product back into global vector */
1089   ierr = VecSet(x,0);CHKERRQ(ierr);
1090   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1091   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1092   PetscFunctionReturn(0);
1093 }
1094 
1095 #undef __FUNCT__
1096 #define __FUNCT__ "MatMultTransposeAdd_IS"
1097 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1098 {
1099   Vec            temp_vec;
1100   PetscErrorCode ierr;
1101 
1102   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
1103   if (v3 != v2) {
1104     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
1105     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1106   } else {
1107     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1108     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
1109     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1110     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1111     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1112   }
1113   PetscFunctionReturn(0);
1114 }
1115 
1116 #undef __FUNCT__
1117 #define __FUNCT__ "MatView_IS"
1118 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
1119 {
1120   Mat_IS         *a = (Mat_IS*)A->data;
1121   PetscErrorCode ierr;
1122   PetscViewer    sviewer;
1123   PetscBool      isascii,view = PETSC_TRUE;
1124 
1125   PetscFunctionBegin;
1126   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
1127   if (isascii)  {
1128     PetscViewerFormat format;
1129 
1130     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1131     if (format == PETSC_VIEWER_ASCII_INFO) view = PETSC_FALSE;
1132   }
1133   if (!view) PetscFunctionReturn(0);
1134   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1135   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
1136   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1137   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   Attaches the row/column local-to-global mappings to a MATIS matrix.
   (Re)creates the sequential local matrix is->A sized by the mappings and,
   unless the matrix is a local reference, the global-to-local scatters and
   the "counter" vector used by the zeroing routines.
*/
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A; its size equals that of the mappings */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix can be customized via the -is_ options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same mapping for rows and columns: share the scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local) */
    /* scatter ones from every subdomain and add them: counter[i] holds the
       number of subdomains sharing local dof i */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "MatSetValues_IS"
1223 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1224 {
1225   Mat_IS         *is = (Mat_IS*)mat->data;
1226   PetscErrorCode ierr;
1227 #if defined(PETSC_USE_DEBUG)
1228   PetscInt       i,zm,zn;
1229 #endif
1230   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1231 
1232   PetscFunctionBegin;
1233 #if defined(PETSC_USE_DEBUG)
1234   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1235   /* count negative indices */
1236   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1237   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1238 #endif
1239   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1240   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1241 #if defined(PETSC_USE_DEBUG)
1242   /* count negative indices (should be the same as before) */
1243   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1244   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1245   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1246   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1247 #endif
1248   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1249   PetscFunctionReturn(0);
1250 }
1251 
1252 #undef __FUNCT__
1253 #define __FUNCT__ "MatSetValuesBlocked_IS"
1254 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1255 {
1256   Mat_IS         *is = (Mat_IS*)mat->data;
1257   PetscErrorCode ierr;
1258 #if defined(PETSC_USE_DEBUG)
1259   PetscInt       i,zm,zn;
1260 #endif
1261   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1262 
1263   PetscFunctionBegin;
1264 #if defined(PETSC_USE_DEBUG)
1265   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1266   /* count negative indices */
1267   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1268   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1269 #endif
1270   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1271   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1272 #if defined(PETSC_USE_DEBUG)
1273   /* count negative indices (should be the same as before) */
1274   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1275   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1276   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1277   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1278 #endif
1279   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1280   PetscFunctionReturn(0);
1281 }
1282 
1283 #undef __FUNCT__
1284 #define __FUNCT__ "MatSetValuesLocal_IS"
1285 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1286 {
1287   PetscErrorCode ierr;
1288   Mat_IS         *is = (Mat_IS*)A->data;
1289 
1290   PetscFunctionBegin;
1291   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1292   PetscFunctionReturn(0);
1293 }
1294 
1295 #undef __FUNCT__
1296 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1297 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1298 {
1299   PetscErrorCode ierr;
1300   Mat_IS         *is = (Mat_IS*)A->data;
1301 
1302   PetscFunctionBegin;
1303   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1304   PetscFunctionReturn(0);
1305 }
1306 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/*
   Zeroes rows (and optionally columns) of the local matrix, given local row
   indices, and then fixes the diagonal entries.
   n == 0 marks the local problem as pure Neumann (no constrained rows).
*/
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.) {
      /* the assembled diagonal entry is the sum of the local contributions:
         divide the requested value by the number of subdomains sharing the
         dof (stored in the counter vector) so the global value is diag */
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1339 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/*
   Common implementation for MatZeroRows_IS and MatZeroRowsColumns_IS.
   Maps global rows to locally owned ones, fixes the right-hand side when
   requested, then communicates the rows to be zeroed to all subdomains
   through the star forest before calling the local zeroing routine.
*/
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* nonzero diagonal, column zeroing and rhs fixing require square layouts */
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  /* mark owned rows to be zeroed at the SF roots and broadcast to the leaves,
     so every subdomain learns which of its local rows are involved */
  ierr = MatISSetUpSF(A);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the marked leaves into a list of local row indices */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1387 
1388 #undef __FUNCT__
1389 #define __FUNCT__ "MatZeroRows_IS"
1390 static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1391 {
1392   PetscErrorCode ierr;
1393 
1394   PetscFunctionBegin;
1395   ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
1396   PetscFunctionReturn(0);
1397 }
1398 
1399 #undef __FUNCT__
1400 #define __FUNCT__ "MatZeroRowsColumns_IS"
1401 static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1402 {
1403   PetscErrorCode ierr;
1404 
1405   PetscFunctionBegin;
1406   ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
1407   PetscFunctionReturn(0);
1408 }
1409 
1410 #undef __FUNCT__
1411 #define __FUNCT__ "MatAssemblyBegin_IS"
1412 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1413 {
1414   Mat_IS         *is = (Mat_IS*)A->data;
1415   PetscErrorCode ierr;
1416 
1417   PetscFunctionBegin;
1418   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1419   PetscFunctionReturn(0);
1420 }
1421 
1422 #undef __FUNCT__
1423 #define __FUNCT__ "MatAssemblyEnd_IS"
1424 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1425 {
1426   Mat_IS         *is = (Mat_IS*)A->data;
1427   PetscErrorCode ierr;
1428 
1429   PetscFunctionBegin;
1430   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1431   PetscFunctionReturn(0);
1432 }
1433 
1434 #undef __FUNCT__
1435 #define __FUNCT__ "MatISGetLocalMat_IS"
1436 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1437 {
1438   Mat_IS *is = (Mat_IS*)mat->data;
1439 
1440   PetscFunctionBegin;
1441   *local = is->A;
1442   PetscFunctionReturn(0);
1443 }
1444 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    The returned matrix is a borrowed reference: no new reference is taken,
  so the caller must not destroy it.
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS, MatISSetLocalMat()
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1475 
1476 #undef __FUNCT__
1477 #define __FUNCT__ "MatISSetLocalMat_IS"
1478 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1479 {
1480   Mat_IS         *is = (Mat_IS*)mat->data;
1481   PetscInt       nrows,ncols,orows,ocols;
1482   PetscErrorCode ierr;
1483 
1484   PetscFunctionBegin;
1485   if (is->A) {
1486     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1487     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1488     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
1489   }
1490   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1491   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1492   is->A = local;
1493   PetscFunctionReturn(0);
1494 }
1495 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.
    The MATIS object takes its own reference on local, so the caller can
  safely destroy its handle afterwards.  If a local matrix is already set,
  the new one must have the same sizes.

.seealso: MATIS, MatISGetLocalMat()
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1525 
1526 #undef __FUNCT__
1527 #define __FUNCT__ "MatZeroEntries_IS"
1528 static PetscErrorCode MatZeroEntries_IS(Mat A)
1529 {
1530   Mat_IS         *a = (Mat_IS*)A->data;
1531   PetscErrorCode ierr;
1532 
1533   PetscFunctionBegin;
1534   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1535   PetscFunctionReturn(0);
1536 }
1537 
1538 #undef __FUNCT__
1539 #define __FUNCT__ "MatScale_IS"
1540 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1541 {
1542   Mat_IS         *is = (Mat_IS*)A->data;
1543   PetscErrorCode ierr;
1544 
1545   PetscFunctionBegin;
1546   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1547   PetscFunctionReturn(0);
1548 }
1549 
1550 #undef __FUNCT__
1551 #define __FUNCT__ "MatGetDiagonal_IS"
1552 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1553 {
1554   Mat_IS         *is = (Mat_IS*)A->data;
1555   PetscErrorCode ierr;
1556 
1557   PetscFunctionBegin;
1558   /* get diagonal of the local matrix */
1559   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1560 
1561   /* scatter diagonal back into global vector */
1562   ierr = VecSet(v,0);CHKERRQ(ierr);
1563   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1564   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1565   PetscFunctionReturn(0);
1566 }
1567 
1568 #undef __FUNCT__
1569 #define __FUNCT__ "MatSetOption_IS"
1570 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1571 {
1572   Mat_IS         *a = (Mat_IS*)A->data;
1573   PetscErrorCode ierr;
1574 
1575   PetscFunctionBegin;
1576   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1577   PetscFunctionReturn(0);
1578 }
1579 
1580 #undef __FUNCT__
1581 #define __FUNCT__ "MatAXPY_IS"
1582 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1583 {
1584   Mat_IS         *y = (Mat_IS*)Y->data;
1585   Mat_IS         *x;
1586 #if defined(PETSC_USE_DEBUG)
1587   PetscBool      ismatis;
1588 #endif
1589   PetscErrorCode ierr;
1590 
1591   PetscFunctionBegin;
1592 #if defined(PETSC_USE_DEBUG)
1593   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1594   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1595 #endif
1596   x = (Mat_IS*)X->data;
1597   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1598   PetscFunctionReturn(0);
1599 }
1600 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* MatGetLocalSubMatrix() for MATIS: creates a MATIS "local reference"
   matrix that shares the local matrix of A and carries row/column
   local-to-global maps built from the requested row/col index sets, so
   that MatSetValuesLocal()/MatSetValuesBlockedLocal() on the submatrix
   write into A.  Only a restricted set of operations is installed on the
   returned matrix (see the ops table at the end). */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* NOTE(review): rg is fetched and restored but not otherwise read here */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the unused tail: nrg (deliberately out of range) in debug builds so
     stray use trips range checks; -1 otherwise — NOTE(review): presumably a
     "not present" marker for the mapping, confirm against ISL2G semantics */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  /* idxs ownership transfers to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns (same construction as for rows);
     reuse the row map when row/col sets and mappings coincide */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix: same global sizes as A, marked as a local
     reference, sharing A's local matrix (MatISSetLocalMat references it) */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: only destroy, local value insertion and
     assembly remain available on the local reference matrix */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}
1689 
1690 #undef __FUNCT__
1691 #define __FUNCT__ "MatCreateIS"
1692 /*@
1693     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1694        process but not across processes.
1695 
1696    Input Parameters:
1697 +     comm    - MPI communicator that will share the matrix
1698 .     bs      - block size of the matrix
1699 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1700 .     rmap    - local to global map for rows
1701 -     cmap    - local to global map for cols
1702 
1703    Output Parameter:
1704 .    A - the resulting matrix
1705 
1706    Level: advanced
1707 
1708    Notes: See MATIS for more details.
1709           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1710           by that process. The sizes of rmap and cmap define the size of the local matrices.
1711           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1712 
1713 .seealso: MATIS, MatSetLocalToGlobalMapping()
1714 @*/
1715 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1716 {
1717   PetscErrorCode ierr;
1718 
1719   PetscFunctionBegin;
1720   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1721   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1722   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1723   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1724   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1725   if (rmap && cmap) {
1726     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1727   } else if (!rmap) {
1728     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1729   } else {
1730     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1731   }
1732   PetscFunctionReturn(0);
1733 }
1734 
1735 /*MC
1736    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1737    This stores the matrices in globally unassembled form. Each processor
1738    assembles only its local Neumann problem and the parallel matrix vector
1739    product is handled "implicitly".
1740 
1741    Operations Provided:
1742 +  MatMult()
1743 .  MatMultAdd()
1744 .  MatMultTranspose()
1745 .  MatMultTransposeAdd()
1746 .  MatZeroEntries()
1747 .  MatSetOption()
1748 .  MatZeroRows()
1749 .  MatSetValues()
1750 .  MatSetValuesBlocked()
1751 .  MatSetValuesLocal()
1752 .  MatSetValuesBlockedLocal()
1753 .  MatScale()
1754 .  MatGetDiagonal()
1755 .  MatMissingDiagonal()
1756 .  MatDuplicate()
1757 .  MatCopy()
1758 .  MatAXPY()
1759 .  MatGetSubMatrix()
1760 .  MatGetLocalSubMatrix()
1761 -  MatSetLocalToGlobalMapping()
1762 
1763    Options Database Keys:
1764 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1765 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1767 
1768           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1769 
1770           You can do matrix preallocation on the local matrix after you obtain it with
1771           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1772 
1773   Level: advanced
1774 
1775 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1776 
1777 M*/
1778 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Type constructor for MATIS: allocates the Mat_IS context, installs the
   type's operations table, composes the MATIS-specific methods and sets
   the type name.  Mappings and the local matrix are set up later via
   MatSetLocalToGlobalMapping() (see MATIS manual page). */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table so unsupported operations
     fail cleanly instead of inheriting stale function pointers */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;

  /* special MATIS functions, reachable via PetscUseMethod()/PetscTryMethod() */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1832