xref: /petsc/src/mat/impls/is/matis.c (revision 3fd1c9e7f1c0fdd3fec5018503c80f3dbf383f11)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 
12 #define MATIS_MAX_ENTRIES_INSERTION 2048
13 
14 static PetscErrorCode MatISComputeSF_Private(Mat);
15 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);
16 
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalSet_IS"
/*
   MatDiagonalSet_IS - Sets (INSERT_VALUES) or adds (ADD_VALUES) the entries of
   the global vector D on the diagonal of a MATIS matrix.

   The global diagonal D is scattered to the local (overlapped) work vector
   is->y; each local entry is then divided by its multiplicity (is->counter
   holds, for every local dof, the number of subdomains sharing it) so that the
   subdomain contributions sum back to D when the unassembled operator is
   applied. Passing D == NULL fills the work vector with 1 instead of
   scattering (entry point used by MatShift_IS).
*/
PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!D) { /* this code branch is used by MatShift_IS */
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
  } else {
    /* scatter the global diagonal into the local overlapped layout */
    ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  /* scale by 1/multiplicity so shared dofs are not counted more than once */
  ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr);
  ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "MatShift_IS"
38 PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
39 {
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
43   ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr);
44   PetscFunctionReturn(0);
45 }
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
49 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
50 {
51   PetscErrorCode ierr;
52   Mat_IS         *is = (Mat_IS*)A->data;
53   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
54 
55   PetscFunctionBegin;
56 #if defined(PETSC_USE_DEBUG)
57   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
58 #endif
59   ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
60   ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
61   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
62   PetscFunctionReturn(0);
63 }
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
67 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
68 {
69   PetscErrorCode ierr;
70   Mat_IS         *is = (Mat_IS*)A->data;
71   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
72 
73   PetscFunctionBegin;
74 #if defined(PETSC_USE_DEBUG)
75   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
76 #endif
77   ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
78   ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
79   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
80   PetscFunctionReturn(0);
81 }
82 
83 #undef __FUNCT__
84 #define __FUNCT__ "PetscLayoutMapLocal_Private"
85 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[],const PetscInt gidxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
86 {
87   PetscInt      *owners = map->range;
88   PetscInt       n      = map->n;
89   PetscSF        sf;
90   PetscInt      *lidxs,*work = NULL;
91   PetscSFNode   *ridxs;
92   PetscMPIInt    rank;
93   PetscInt       r, p = 0, len = 0;
94   PetscErrorCode ierr;
95 
96   PetscFunctionBegin;
97   /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
98   ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
99   ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
100   for (r = 0; r < n; ++r) lidxs[r] = -1;
101   ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
102   for (r = 0; r < N; ++r) {
103     const PetscInt idx = idxs[r];
104     if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
105     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
106       ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
107     }
108     ridxs[r].rank = p;
109     ridxs[r].index = idxs[r] - owners[p];
110   }
111   ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
112   ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
113   ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
114   ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
115   if ((gidxs && ogidxs) || ogidxs) {
116     ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
117     if (!gidxs) { /* if gidxs is not provided, global indices are inferred */
118       PetscInt cum = 0,start,*work2;
119       ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
120       for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
121       ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
122       start -= cum;
123       cum = 0;
124       for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
125       ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
126       ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
127       ierr = PetscFree(work2);CHKERRQ(ierr);
128     } else {
129       ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr);
130       ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr);
131     }
132   }
133   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
134   /* Compress and put in indices */
135   for (r = 0; r < n; ++r)
136     if (lidxs[r] >= 0) {
137       if (work) work[len] = work[r];
138       lidxs[len++] = r;
139     }
140   if (on) *on = len;
141   if (oidxs) *oidxs = lidxs;
142   if (ogidxs) *ogidxs = work;
143   PetscFunctionReturn(0);
144 }
145 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   MatGetSubMatrix_IS - Extracts the parallel submatrix given by the global
   index sets irow/icol, preserving the MATIS (unassembled) format.

   For MAT_INITIAL_MATRIX, the routine builds new row/column local-to-global
   mappings for the submatrix and caches two local index sets
   (getsub_ris/getsub_cis) in the new matrix so that MAT_REUSE_MATRIX calls
   can re-extract the local submatrix cheaply. irow and icol must not contain
   duplicate entries (checked in debug builds).
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* a reused matrix must be a MATIS carrying the cached local extraction ISs */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count how many times each global row/col is selected by adding 1 per
     occurrence into a vector; any entry other than 0 or 1 is a duplicate */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* mark each selected owned row with its (1-based) new global index, then
       broadcast the marks to the local (overlapped) rows; 0 means "not selected" */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;                            /* local index in the old local matrix */
        newgidxs[newloc++] = matis->sf_leafdata[i]-1; /* global index in the new matrix (undo 1-based marking) */
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* symmetric extraction: reuse the row mapping and IS for the columns */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout (same scheme as for the rows, using the column SF) */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the cached local index sets */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
297 
298 #undef __FUNCT__
299 #define __FUNCT__ "MatCopy_IS"
300 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
301 {
302   Mat_IS         *a = (Mat_IS*)A->data,*b;
303   PetscBool      ismatis;
304   PetscErrorCode ierr;
305 
306   PetscFunctionBegin;
307   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
308   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
309   b = (Mat_IS*)B->data;
310   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
311   PetscFunctionReturn(0);
312 }
313 
#undef __FUNCT__
#define __FUNCT__ "MatMissingDiagonal_IS"
/*
   MatMissingDiagonal_IS - Reports whether the (assembled) diagonal of the
   MATIS matrix contains a zero entry.

   Note: this detects *numerically* zero diagonal values (it assembles the
   diagonal via MatGetDiagonal), not structurally missing entries of the local
   matrices. If d is non-NULL, it receives the global row index of the first
   zero diagonal entry owned by this process, or -1 if none.
*/
static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
{
  Vec               v;
  const PetscScalar *array;
  PetscInt          i,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  *missing = PETSC_FALSE;
  ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
  ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
  ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
  ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
  /* stop at the first zero entry; i == n afterwards means all are nonzero */
  for (i=0;i<n;i++) if (array[i] == 0.) break;
  ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  if (i != n) *missing = PETSC_TRUE;
  if (d) {
    *d = -1;
    if (*missing) {
      PetscInt rstart;
      /* convert the local position to a global row index */
      ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
      *d = i+rstart;
    }
  }
  PetscFunctionReturn(0);
}
343 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the star forests used to exchange data
   between the owned (root) and local/overlapped (leaf) numberings of a MATIS
   matrix, and allocates the associated root/leaf work arrays.

   A row SF is always created from the row local-to-global mapping. When the
   column mapping differs from the row mapping, a separate column SF is built;
   otherwise the column fields simply alias the row ones (so destruction code
   elsewhere must check for this aliasing before freeing).

   NOTE(review): PETSC_OWN_POINTER here refers to the ilocal argument of
   PetscSFSetGraphLayout, which is NULL — gidxs is only read and is correctly
   restored afterwards; confirm against the PetscSFSetGraphLayout manual.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all rows of the local matrix; roots: owned rows of B */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* same mapping for rows and columns: the column SF and buffers alias the row ones */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
377 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (MatISSetPreallocation_IS); no-op if the method is absent */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
425 
426 #undef __FUNCT__
427 #define __FUNCT__ "MatISSetPreallocation_IS"
428 static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
429 {
430   Mat_IS         *matis = (Mat_IS*)(B->data);
431   PetscInt       bs,i,nlocalcols;
432   PetscErrorCode ierr;
433 
434   PetscFunctionBegin;
435   if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
436   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
437     ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
438   }
439   if (!d_nnz) {
440     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
441   } else {
442     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
443   }
444   if (!o_nnz) {
445     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
446   } else {
447     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
448   }
449   ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
450   ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
451   ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
452   ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
453   for (i=0;i<matis->sf_nleaves;i++) {
454     matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
455   }
456   ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
457   for (i=0;i<matis->sf_nleaves/bs;i++) {
458     matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
459   }
460   ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
461   for (i=0;i<matis->sf_nleaves/bs;i++) {
462     matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
463   }
464   ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
465   PetscFunctionReturn(0);
466 }
467 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   MatISSetMPIXAIJPreallocation_Private - Computes and sets the preallocation
   of the assembled matrix B from the unassembled MATIS matrix A.

   Each process counts, for every row of its local matrix, the diagonal-block
   and off-diagonal-block nonzeros it would contribute to the assembled
   operator (my_dnz/my_onz); these per-subdomain counts are then combined on
   the owned rows through the matrix SF. With maxreduce == PETSC_TRUE an
   MPI_MAX reduction is used (tighter but possibly insufficient when
   subdomains contribute disjoint columns to a shared row); otherwise MPI_SUM
   gives an always-sufficient overestimate. The result is clamped to the
   matrix dimensions and applied for all three candidate formats (MPIAIJ,
   MPIBAIJ, MPISBAIJ); only the call matching B's type takes effect.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    /* SBAIJ local matrices only expose the upper triangle; ask for full rows */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges (used for the diag/offdiag split) */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* count only the upper triangle explicitly; the (j,i) mirror is added in the same pass */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) {
    /* fast path: walk the CSR structure of the local matrix directly */
    const PetscInt *ii,*jj,*jptr;
    PetscBool      done;
    ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ stores only the upper triangle: count the mirrored entry too */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation (blocked variants compress to block rows first) */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
640 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   Assembles the globally unassembled MATIS matrix mat into the parallel matrix *M.

   Target type (MAT_INITIAL_MATRIX only): MATSBAIJ when every process stores a
   SEQSBAIJ local matrix, otherwise MATBAIJ for bs>1 or MATAIJ.  Values are
   inserted with ADD_VALUES through the matrix's local-to-global mappings, which
   performs the subassembly over overlapping subdomain dofs.  With
   MAT_REUSE_MATRIX, *M must already have matching (block) sizes and is zeroed
   before insertion.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor case: the local matrix already represents the whole operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if ALL processes hold SBAIJ local matrices */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat's global/local sizes and block size */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SEQSBAIJ stores only the upper triangle; convert to SEQBAIJ so the
     insertion loops below visit every stored entry */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays [0,local_rows) x [0,local_cols); shared when square */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* insert column-oriented, matching the dense array layout */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* local_mat can be released while the assembly communication is in flight */
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default row-oriented insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
774 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, *newmat must be a valid matrix, live on the same
       communicator, and be a different object from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the implementation composed on the matrix (MatISGetMPIXAIJ_IS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
809 
810 #undef __FUNCT__
811 #define __FUNCT__ "MatDuplicate_IS"
812 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
813 {
814   PetscErrorCode ierr;
815   Mat_IS         *matis = (Mat_IS*)(mat->data);
816   PetscInt       bs,m,n,M,N;
817   Mat            B,localmat;
818 
819   PetscFunctionBegin;
820   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
821   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
822   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
823   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
824   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
825   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
826   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
827   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
828   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
829   *newmat = B;
830   PetscFunctionReturn(0);
831 }
832 
833 #undef __FUNCT__
834 #define __FUNCT__ "MatIsHermitian_IS"
835 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
836 {
837   PetscErrorCode ierr;
838   Mat_IS         *matis = (Mat_IS*)A->data;
839   PetscBool      local_sym;
840 
841   PetscFunctionBegin;
842   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
843   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
844   PetscFunctionReturn(0);
845 }
846 
847 #undef __FUNCT__
848 #define __FUNCT__ "MatIsSymmetric_IS"
849 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
850 {
851   PetscErrorCode ierr;
852   Mat_IS         *matis = (Mat_IS*)A->data;
853   PetscBool      local_sym;
854 
855   PetscFunctionBegin;
856   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
857   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
860 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Destroys a MATIS matrix: releases the local matrix, the row/column
   scatters, the work and counter vectors, the star forests with their
   work arrays, the Mat_IS structure itself, and detaches the methods
   composed on the object. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* csf may alias sf: destroy it (and its work arrays) only when distinct */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "MatMult_IS"
894 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
895 {
896   PetscErrorCode ierr;
897   Mat_IS         *is  = (Mat_IS*)A->data;
898   PetscScalar    zero = 0.0;
899 
900   PetscFunctionBegin;
901   /*  scatter the global vector x into the local work vector */
902   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
903   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
904 
905   /* multiply the local matrix */
906   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
907 
908   /* scatter product back into global memory */
909   ierr = VecSet(y,zero);CHKERRQ(ierr);
910   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
911   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
912   PetscFunctionReturn(0);
913 }
914 
915 #undef __FUNCT__
916 #define __FUNCT__ "MatMultAdd_IS"
917 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
918 {
919   Vec            temp_vec;
920   PetscErrorCode ierr;
921 
922   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
923   if (v3 != v2) {
924     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
925     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
926   } else {
927     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
928     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
929     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
930     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
931     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
932   }
933   PetscFunctionReturn(0);
934 }
935 
936 #undef __FUNCT__
937 #define __FUNCT__ "MatMultTranspose_IS"
938 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
939 {
940   Mat_IS         *is = (Mat_IS*)A->data;
941   PetscErrorCode ierr;
942 
943   PetscFunctionBegin;
944   /*  scatter the global vector x into the local work vector */
945   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
946   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
947 
948   /* multiply the local matrix */
949   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
950 
951   /* scatter product back into global vector */
952   ierr = VecSet(x,0);CHKERRQ(ierr);
953   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
954   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 #undef __FUNCT__
959 #define __FUNCT__ "MatMultTransposeAdd_IS"
960 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
961 {
962   Vec            temp_vec;
963   PetscErrorCode ierr;
964 
965   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
966   if (v3 != v2) {
967     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
968     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
969   } else {
970     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
971     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
972     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
973     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
974     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
975   }
976   PetscFunctionReturn(0);
977 }
978 
979 #undef __FUNCT__
980 #define __FUNCT__ "MatView_IS"
981 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
982 {
983   Mat_IS         *a = (Mat_IS*)A->data;
984   PetscErrorCode ierr;
985   PetscViewer    sviewer;
986 
987   PetscFunctionBegin;
988   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
989   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
990   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
991   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
992   PetscFunctionReturn(0);
993 }
994 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   (Re)initializes a MATIS matrix from its row/column local-to-global
   mappings: destroys any previous local data, creates the sequential
   local matrix sized by the mappings, and (unless the matrix is a local
   reference) builds the global-to-local scatters plus the counter vector
   holding, for each local dof, the number of subdomains sharing it.
*/
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential nr x nc matrix with the block
     sizes taken from the mappings, type configurable via the "is_" prefix */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters: local dof i <-> global dof rmapping(i) */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same mapping for rows and columns: share the row scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): scatter-add ones to the global
       vector and back, so counter[i] = number of subdomains sharing dof i */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1074 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/*
   Inserts values given in GLOBAL numbering: indices are mapped to local
   numbering on fixed-size stack buffers (hence the
   MATIS_MAX_ENTRIES_INSERTION limit, checked in debug builds) and the
   insertion is forwarded to the local matrix.  With IS_GTOLM_MASK,
   unmappable indices become -1 and are skipped by MatSetValues; the debug
   code verifies no new negatives appeared, i.e. all given indices mapped.
*/
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  /* stack buffers for the locally mapped indices */
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1105 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/*
   Blocked variant of MatSetValues_IS: maps GLOBAL block indices to local
   block numbering (stack buffers, see MATIS_MAX_ENTRIES_INSERTION) and
   forwards the insertion to the local matrix.  Debug builds verify that
   every given index was mappable (no new negatives after mapping).
*/
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  /* stack buffers for the locally mapped block indices */
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "MatSetValuesLocal_IS"
1139 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1140 {
1141   PetscErrorCode ierr;
1142   Mat_IS         *is = (Mat_IS*)A->data;
1143 
1144   PetscFunctionBegin;
1145   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1146   PetscFunctionReturn(0);
1147 }
1148 
1149 #undef __FUNCT__
1150 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1151 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1152 {
1153   PetscErrorCode ierr;
1154   Mat_IS         *is = (Mat_IS*)A->data;
1155 
1156   PetscFunctionBegin;
1157   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1158   PetscFunctionReturn(0);
1159 }
1160 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/*
   Zeros global rows of a MATIS matrix.  Locally owned rows are used to
   fix the right-hand side (b = diag*x on those rows); the rows to zero
   are then broadcast through the star forest to every subdomain sharing
   them, and the actual zeroing of the local matrices is performed by
   MatISZeroRowsLocal_Private.
*/
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* flag the owned rows to be zeroed, then broadcast the flags to all
     subdomains touching those rows */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the flagged local row indices and zero them subdomain-wise */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1201 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsLocal_Private"
/*
   Zeros the given LOCAL rows of the local matrix.  When diag is nonzero,
   each diagonal entry is set to diag divided by the corresponding entry
   of the counter vector (the number of subdomains sharing that dof, see
   MatSetLocalToGlobalMapping_IS), so that the subassembled global matrix
   carries exactly diag on those rows.  An empty row set flags the
   subdomain as a pure Neumann problem.
*/
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        /* split diag among the subdomains sharing this dof */
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1230 
1231 #undef __FUNCT__
1232 #define __FUNCT__ "MatAssemblyBegin_IS"
1233 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1234 {
1235   Mat_IS         *is = (Mat_IS*)A->data;
1236   PetscErrorCode ierr;
1237 
1238   PetscFunctionBegin;
1239   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1240   PetscFunctionReturn(0);
1241 }
1242 
1243 #undef __FUNCT__
1244 #define __FUNCT__ "MatAssemblyEnd_IS"
1245 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1246 {
1247   Mat_IS         *is = (Mat_IS*)A->data;
1248   PetscErrorCode ierr;
1249 
1250   PetscFunctionBegin;
1251   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1252   PetscFunctionReturn(0);
1253 }
1254 
1255 #undef __FUNCT__
1256 #define __FUNCT__ "MatISGetLocalMat_IS"
1257 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1258 {
1259   Mat_IS *is = (Mat_IS*)mat->data;
1260 
1261   PetscFunctionBegin;
1262   *local = is->A;
1263   PetscFunctionReturn(0);
1264 }
1265 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    The returned matrix is a borrowed reference: its reference count is not
  increased, so the caller should not destroy it.

    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the implementation composed on the matrix (MatISGetLocalMat_IS) */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1296 
1297 #undef __FUNCT__
1298 #define __FUNCT__ "MatISSetLocalMat_IS"
1299 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1300 {
1301   Mat_IS         *is = (Mat_IS*)mat->data;
1302   PetscInt       nrows,ncols,orows,ocols;
1303   PetscErrorCode ierr;
1304 
1305   PetscFunctionBegin;
1306   if (is->A) {
1307     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1308     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1309     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
1310   }
1311   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1312   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1313   is->A = local;
1314   PetscFunctionReturn(0);
1315 }
1316 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the implementation composed on the matrix (MatISSetLocalMat_IS) */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1346 
1347 #undef __FUNCT__
1348 #define __FUNCT__ "MatZeroEntries_IS"
1349 static PetscErrorCode MatZeroEntries_IS(Mat A)
1350 {
1351   Mat_IS         *a = (Mat_IS*)A->data;
1352   PetscErrorCode ierr;
1353 
1354   PetscFunctionBegin;
1355   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1356   PetscFunctionReturn(0);
1357 }
1358 
1359 #undef __FUNCT__
1360 #define __FUNCT__ "MatScale_IS"
1361 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1362 {
1363   Mat_IS         *is = (Mat_IS*)A->data;
1364   PetscErrorCode ierr;
1365 
1366   PetscFunctionBegin;
1367   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1368   PetscFunctionReturn(0);
1369 }
1370 
1371 #undef __FUNCT__
1372 #define __FUNCT__ "MatGetDiagonal_IS"
1373 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1374 {
1375   Mat_IS         *is = (Mat_IS*)A->data;
1376   PetscErrorCode ierr;
1377 
1378   PetscFunctionBegin;
1379   /* get diagonal of the local matrix */
1380   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1381 
1382   /* scatter diagonal back into global vector */
1383   ierr = VecSet(v,0);CHKERRQ(ierr);
1384   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1385   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1386   PetscFunctionReturn(0);
1387 }
1388 
1389 #undef __FUNCT__
1390 #define __FUNCT__ "MatSetOption_IS"
1391 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1392 {
1393   Mat_IS         *a = (Mat_IS*)A->data;
1394   PetscErrorCode ierr;
1395 
1396   PetscFunctionBegin;
1397   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1398   PetscFunctionReturn(0);
1399 }
1400 
1401 #undef __FUNCT__
1402 #define __FUNCT__ "MatAXPY_IS"
1403 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1404 {
1405   Mat_IS         *y = (Mat_IS*)Y->data;
1406   Mat_IS         *x;
1407 #if defined(PETSC_USE_DEBUG)
1408   PetscBool      ismatis;
1409 #endif
1410   PetscErrorCode ierr;
1411 
1412   PetscFunctionBegin;
1413 #if defined(PETSC_USE_DEBUG)
1414   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1415   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1416 #endif
1417   x = (Mat_IS*)X->data;
1418   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1419   PetscFunctionReturn(0);
1420 }
1421 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Create a MATIS wrapper (*submat) for assembling into the (row,col) local
   submatrix of A.

   The wrapper shares A's local matrix (MatISGetLocalMat/MatISSetLocalMat)
   and is given local-to-global maps built so that submatrix-local index i
   maps to row[i] (resp. col[i]).  Its function table is then zeroed so that
   only destroy, local (blocked) value insertion and assembly are supported. */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* NOTE(review): rg (and cg below) are fetched and restored but never read;
     the Get/Restore pairs look removable */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* every requested local row must be a valid local index of A;
     NOTE(review): %d assumes PetscInt fits in int -- %D would be safer with
     64-bit indices */
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %d -> %d greater than maximum possible %d\n",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the remaining entries: debug builds use the out-of-range value nrg
     so accidental use of the padding is caught; otherwise -1 (negative
     indices are presumably ignored during insertion -- confirm against
     MatSetValuesLocal semantics) */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  /* idxs ownership moves to the IS (PETSC_OWN_POINTER); do not free it here */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %d -> %d greater than maximum possible %d\n",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* same IS and same map on both sides: share the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  /* mark the wrapper as a local reference so it is treated specially elsewhere */
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  /* share A's local matrix rather than copying it */
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}
1510 
1511 #undef __FUNCT__
1512 #define __FUNCT__ "MatCreateIS"
1513 /*@
1514     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1515        process but not across processes.
1516 
1517    Input Parameters:
1518 +     comm    - MPI communicator that will share the matrix
1519 .     bs      - block size of the matrix
1520 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1521 .     rmap    - local to global map for rows
1522 -     cmap    - local to global map for cols
1523 
1524    Output Parameter:
1525 .    A - the resulting matrix
1526 
1527    Level: advanced
1528 
1529    Notes: See MATIS for more details.
1530           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1531           by that process. The sizes of rmap and cmap define the size of the local matrices.
1532           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1533 
1534 .seealso: MATIS, MatSetLocalToGlobalMapping()
1535 @*/
1536 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1537 {
1538   PetscErrorCode ierr;
1539 
1540   PetscFunctionBegin;
1541   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1542   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1543   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1544   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1545   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1546   if (rmap && cmap) {
1547     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1548   } else if (!rmap) {
1549     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1550   } else {
1551     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1552   }
1553   PetscFunctionReturn(0);
1554 }
1555 
1556 /*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1558    This stores the matrices in globally unassembled form. Each processor
1559    assembles only its local Neumann problem and the parallel matrix vector
1560    product is handled "implicitly".
1561 
1562    Operations Provided:
1563 +  MatMult()
1564 .  MatMultAdd()
1565 .  MatMultTranspose()
1566 .  MatMultTransposeAdd()
1567 .  MatZeroEntries()
1568 .  MatSetOption()
1569 .  MatZeroRows()
1570 .  MatSetValues()
1571 .  MatSetValuesBlocked()
1572 .  MatSetValuesLocal()
1573 .  MatSetValuesBlockedLocal()
1574 .  MatScale()
1575 .  MatGetDiagonal()
1576 .  MatMissingDiagonal()
1577 .  MatDuplicate()
1578 .  MatCopy()
1579 .  MatAXPY()
1580 .  MatGetSubMatrix()
1581 .  MatGetLocalSubMatrix()
1582 -  MatSetLocalToGlobalMapping()
1583 
1584    Options Database Keys:
1585 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1586 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1588 
1589           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1590 
1591           You can do matrix preallocation on the local matrix after you obtain it with
1592           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1593 
1594   Level: advanced
1595 
1596 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1597 
1598 M*/
1599 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS matrix type: allocates the Mat_IS context,
   installs the MATIS implementations in the Mat operations table, and
   composes the MATIS-specific functions resolvable via
   PetscObjectQueryFunction (MatISGetLocalMat etc.). */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from an all-NULL table so any operation not set
     below is reported as unsupported */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1650