xref: /petsc/src/mat/impls/is/matis.c (revision fd479f66c58c9a0ee261ba9ebfceb419755cfce5)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4     This stores the matrices in globally unassembled form. Each processor
5     assembles only its local Neumann problem and the parallel matrix vector
6     product is handled "implicitly".
7 
8     Currently this allows for only one subdomain per processor.
9 */
10 
11 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
12 
13 static PetscErrorCode MatISComputeSF_Private(Mat);
14 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "PetscLayoutMapLocal_Private"
18 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[],const PetscInt gidxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
19 {
20   PetscInt      *owners = map->range;
21   PetscInt       n      = map->n;
22   PetscSF        sf;
23   PetscInt      *lidxs,*work = NULL;
24   PetscSFNode   *ridxs;
25   PetscMPIInt    rank;
26   PetscInt       r, p = 0, len = 0;
27   PetscErrorCode ierr;
28 
29   PetscFunctionBegin;
30   /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
31   ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
32   ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
33   for (r = 0; r < n; ++r) lidxs[r] = -1;
34   ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
35   for (r = 0; r < N; ++r) {
36     const PetscInt idx = idxs[r];
37     if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
38     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
39       ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
40     }
41     ridxs[r].rank = p;
42     ridxs[r].index = idxs[r] - owners[p];
43   }
44   ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
45   ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
46   ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
47   ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
48   if ((gidxs && ogidxs) || ogidxs) {
49     ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
50     if (!gidxs) { /* if gidxs is not provided, global indices are inferred */
51       PetscInt cum = 0,start,*work2;
52       ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
53       for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
54       ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
55       start -= cum;
56       cum = 0;
57       for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
58       ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
59       ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
60       ierr = PetscFree(work2);CHKERRQ(ierr);
61     } else {
62       ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr);
63       ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr);
64     }
65   }
66   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
67   /* Compress and put in indices */
68   for (r = 0; r < n; ++r)
69     if (lidxs[r] >= 0) {
70       if (work) work[len] = work[r];
71       lidxs[len++] = r;
72     }
73   if (on) *on = len;
74   if (oidxs) *oidxs = lidxs;
75   if (ogidxs) *ogidxs = work;
76   PetscFunctionReturn(0);
77 }
78 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   Extracts a parallel submatrix of a MATIS matrix indexed by irow/icol (global
   numbering), producing a new MATIS. irow and icol must not contain duplicate
   entries (verified in debug builds below). On MAT_REUSE_MATRIX, the local
   row/col index sets cached on the previous result (getsub_ris/getsub_cis)
   are reused to extract the local submatrix again.
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* reuse requires a MATIS target that still carries the cached local ISs */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count each requested index with ADD_VALUES; any accumulated value > 1 is a duplicate */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  /* same duplicate check for the column index set */
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               i,m,n,ll,newloc;  /* NOTE: shadows outer i,m,n */
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* mark selected owned rows with new-global-index+1 so that 0 means "not selected" */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    /* broadcast the selection to the local (leaf) numbering */
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1; /* undo the +1 shift */
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* symmetric selection: reuse the row map and IS for the columns */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* same selection/broadcast scheme as for rows, on the column SF */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the cached local ISs */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatGetLocalSubMatrix_IS"
233 static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
234 {
235   Mat                    lA,lsubmat;
236   ISLocalToGlobalMapping rl2g,cl2g;
237   IS                     is,isn;
238   const PetscInt         *rg,*rl;
239 #if defined(PETSC_USE_DEBUG)
240   PetscInt               nrg;
241 #endif
242   PetscInt               N,M,nrl,i,*idxs;
243   PetscErrorCode         ierr;
244 
245   PetscFunctionBegin;
246   /* extract local submatrix (it will complain if row and col exceed the local sizes) */
247   ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
248   ierr = MatGetSubMatrix(lA,row,col,MAT_INITIAL_MATRIX,&lsubmat);CHKERRQ(ierr);
249   /* compute new l2g map for rows */
250   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
251   ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
252   ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
253 #if defined(PETSC_USE_DEBUG)
254   ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
255   for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %d -> %d greater than maximum possible %d\n",i,rl[i],nrg);
256 #endif
257   ierr = PetscMalloc1(nrl,&idxs);CHKERRQ(ierr);
258   for (i=0;i<nrl;i++) idxs[i] = rg[rl[i]];
259   ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
260   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
261   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrl,idxs,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
262   ierr = PetscFree(idxs);CHKERRQ(ierr);
263   ierr = ISRenumber(is,NULL,&M,&isn);CHKERRQ(ierr);
264   ierr = ISDestroy(&is);CHKERRQ(ierr);
265   ierr = ISLocalToGlobalMappingCreateIS(isn,&rl2g);CHKERRQ(ierr);
266   ierr = ISDestroy(&isn);CHKERRQ(ierr);
267   /* compute new l2g map for columns */
268   if (col != row || A->rmap->mapping != A->cmap->mapping) {
269     const PetscInt *cg,*cl;
270 #if defined(PETSC_USE_DEBUG)
271     PetscInt       ncg;
272 #endif
273     PetscInt       ncl;
274 
275     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
276     ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
277     ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
278 #if defined(PETSC_USE_DEBUG)
279     ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
280     for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %d -> %d greater than maximum possible %d\n",i,cl[i],ncg);
281 #endif
282     ierr = PetscMalloc1(ncl,&idxs);CHKERRQ(ierr);
283     for (i=0;i<ncl;i++) idxs[i] = cg[cl[i]];
284     ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
285     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
286     ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncl,idxs,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
287     ierr = PetscFree(idxs);CHKERRQ(ierr);
288     ierr = ISRenumber(is,NULL,&N,&isn);CHKERRQ(ierr);
289     ierr = ISDestroy(&is);CHKERRQ(ierr);
290     ierr = ISLocalToGlobalMappingCreateIS(isn,&cl2g);CHKERRQ(ierr);
291     ierr = ISDestroy(&isn);CHKERRQ(ierr);
292   } else {
293     ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
294     cl2g = rl2g;
295     N = M;
296   }
297   /* create the MATIS submatrix */
298   ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
299   ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
300   ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
301   ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
302   ierr = MatISSetLocalMat(*submat,lsubmat);CHKERRQ(ierr);
303   ierr = MatDestroy(&lsubmat);CHKERRQ(ierr);
304   ierr = MatSetUp(*submat);CHKERRQ(ierr);
305   ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
306   ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
307   ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
308   ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
309   PetscFunctionReturn(0);
310 }
311 
312 #undef __FUNCT__
313 #define __FUNCT__ "MatCopy_IS"
314 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
315 {
316   Mat_IS         *a = (Mat_IS*)A->data,*b;
317   PetscBool      ismatis;
318   PetscErrorCode ierr;
319 
320   PetscFunctionBegin;
321   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
322   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
323   b = (Mat_IS*)B->data;
324   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
325   PetscFunctionReturn(0);
326 }
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "MatMissingDiagonal_IS"
330 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
331 {
332   Vec               v;
333   const PetscScalar *array;
334   PetscInt          i,n;
335   PetscErrorCode    ierr;
336 
337   PetscFunctionBegin;
338   *missing = PETSC_FALSE;
339   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
340   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
341   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
342   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
343   for (i=0;i<n;i++) if (array[i] == 0.) break;
344   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
345   ierr = VecDestroy(&v);CHKERRQ(ierr);
346   if (i != n) *missing = PETSC_TRUE;
347   if (d) {
348     *d = -1;
349     if (*missing) {
350       PetscInt rstart;
351       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
352       *d = i+rstart;
353     }
354   }
355   PetscFunctionReturn(0);
356 }
357 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forest(s) connecting the owned (root) rows/columns of B to
   the local (leaf) numbering of the local matrix matis->A, and allocates the
   matching rootdata/leafdata work arrays. When the row and column l2g maps
   coincide, the column SF and its buffers simply alias the row ones (no extra
   reference is taken here; destruction code must account for the aliasing).
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: rows of the local matrix; roots: locally owned rows of B */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* NOTE(review): the PETSC_OWN_POINTER mode applies to the (NULL) ilocal argument,
     not to gidxs, which is restored right below — confirm against PetscSFSetGraphLayout */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* identical mappings: reuse the row SF and its buffers for columns */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
391 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation registered as MatISSetPreallocation_C */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
439 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   Implements MatISSetPreallocation(): the per-owned-row diagonal and
   off-diagonal counts are summed, broadcast to the local (unassembled)
   numbering through the matrix SF, and used to preallocate the local matrix
   in SeqAIJ, SeqBAIJ and SeqSBAIJ formats. The estimates are upper bounds
   (diag/offdiag are merged and clipped to the local column count).
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total (diag + offdiag) nonzero estimate per owned row */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* move counts to the local numbering; overlap the bcast with local size queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot hold more nonzeros than there are local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* block-row counts for BAIJ: use the first row of each block, scaled by bs
     (overwrites the leading entries of sf_leafdata in place) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ keeps the upper triangle only: discount i (block) entries per block row
     — NOTE(review): heuristic overestimate adjustment, not an exact count */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
481 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   Preallocates the assembled matrix B (MPIAIJ/MPIBAIJ/MPISBAIJ) for the
   conversion of the MATIS matrix A. Each process counts exact diagonal and
   off-diagonal contributions of its local matrix (my_dnz/my_onz), then the
   counts are combined on the owning processes through the matrix SF, either
   summed (safe overestimate) or max-reduced depending on maxreduce.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    /* SBAIJ MatGetRow returns only the upper triangle; symmetrize counts manually below */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  /* opens a scope; closed by MatPreallocateFinalize below */
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* full upper triangle of the local matrix, mirrored to the lower triangle */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* mirror the upper-triangle entry to its symmetric counterpart */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* collapse point counts into block counts for (S)BAIJ (in place) */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
625 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assembles the globally unassembled MATIS matrix "mat" into the parallel matrix *M
   (type chosen among AIJ/BAIJ/SBAIJ from the local matrix types and block size) by
   adding together the subdomain contributions. With reuse == MAT_REUSE_MATRIX, *M
   must come from a previous call with MAT_INITIAL_MATRIX on a compatible matrix. */
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) { /* uniprocessor case: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every process has an SBAIJ local matrix */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match the MATIS layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle: convert so that full rows can be inserted */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices: insert the whole array at once */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense array is column major, hence column-oriented insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* use the raw CSR arrays to insert row by row */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) { /* restore default row-oriented insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
759 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* reusing requires a valid matrix on the same communicator, distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
794 
795 #undef __FUNCT__
796 #define __FUNCT__ "MatDuplicate_IS"
797 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
798 {
799   PetscErrorCode ierr;
800   Mat_IS         *matis = (Mat_IS*)(mat->data);
801   PetscInt       bs,m,n,M,N;
802   Mat            B,localmat;
803 
804   PetscFunctionBegin;
805   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
806   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
807   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
808   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
809   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
810   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
811   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
812   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
813   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
814   *newmat = B;
815   PetscFunctionReturn(0);
816 }
817 
818 #undef __FUNCT__
819 #define __FUNCT__ "MatIsHermitian_IS"
820 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
821 {
822   PetscErrorCode ierr;
823   Mat_IS         *matis = (Mat_IS*)A->data;
824   PetscBool      local_sym;
825 
826   PetscFunctionBegin;
827   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
828   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
829   PetscFunctionReturn(0);
830 }
831 
832 #undef __FUNCT__
833 #define __FUNCT__ "MatIsSymmetric_IS"
834 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
835 {
836   PetscErrorCode ierr;
837   Mat_IS         *matis = (Mat_IS*)A->data;
838   PetscBool      local_sym;
839 
840   PetscFunctionBegin;
841   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
842   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Releases all objects owned by the MATIS matrix and detaches its composed methods */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* the column SF and its buffers are freed only when distinct from the row SF;
     NOTE(review): when csf == sf, csf_rootdata/csf_leafdata presumably alias the
     sf_* buffers freed below -- confirm where csf is assigned */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  /* reset the type name and remove the type-specific methods composed on the object */
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
875 
876 #undef __FUNCT__
877 #define __FUNCT__ "MatMult_IS"
878 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
879 {
880   PetscErrorCode ierr;
881   Mat_IS         *is  = (Mat_IS*)A->data;
882   PetscScalar    zero = 0.0;
883 
884   PetscFunctionBegin;
885   /*  scatter the global vector x into the local work vector */
886   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
887   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
888 
889   /* multiply the local matrix */
890   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
891 
892   /* scatter product back into global memory */
893   ierr = VecSet(y,zero);CHKERRQ(ierr);
894   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
895   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
896   PetscFunctionReturn(0);
897 }
898 
899 #undef __FUNCT__
900 #define __FUNCT__ "MatMultAdd_IS"
901 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
902 {
903   Vec            temp_vec;
904   PetscErrorCode ierr;
905 
906   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
907   if (v3 != v2) {
908     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
909     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
910   } else {
911     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
912     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
913     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
914     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
915     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
916   }
917   PetscFunctionReturn(0);
918 }
919 
920 #undef __FUNCT__
921 #define __FUNCT__ "MatMultTranspose_IS"
922 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
923 {
924   Mat_IS         *is = (Mat_IS*)A->data;
925   PetscErrorCode ierr;
926 
927   PetscFunctionBegin;
928   /*  scatter the global vector x into the local work vector */
929   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
930   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
931 
932   /* multiply the local matrix */
933   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
934 
935   /* scatter product back into global vector */
936   ierr = VecSet(x,0);CHKERRQ(ierr);
937   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
938   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "MatMultTransposeAdd_IS"
944 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
945 {
946   Vec            temp_vec;
947   PetscErrorCode ierr;
948 
949   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
950   if (v3 != v2) {
951     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
952     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
953   } else {
954     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
955     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
956     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
957     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
958     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
959   }
960   PetscFunctionReturn(0);
961 }
962 
963 #undef __FUNCT__
964 #define __FUNCT__ "MatView_IS"
965 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
966 {
967   Mat_IS         *a = (Mat_IS*)A->data;
968   PetscErrorCode ierr;
969   PetscViewer    sviewer;
970 
971   PetscFunctionBegin;
972   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
973   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
974   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
975   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
976   PetscFunctionReturn(0);
977 }
978 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Attaches the row/column local-to-global mappings to the MATIS matrix. This
   (re)creates the sequential subdomain matrix is->A, the local work vectors
   and the global-to-local scatters; any previously set local data is discarded. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: sequential, sized by the number of local indices in the maps */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix type is customizable through the -is_ options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same mapping for rows and columns: share the row scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1045 
1046 #undef __FUNCT__
1047 #define __FUNCT__ "MatSetValues_IS"
1048 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1049 {
1050   Mat_IS         *is = (Mat_IS*)mat->data;
1051   PetscErrorCode ierr;
1052 #if defined(PETSC_USE_DEBUG)
1053   PetscInt       i,zm,zn;
1054 #endif
1055   PetscInt       rows_l[2048],cols_l[2048];
1056 
1057   PetscFunctionBegin;
1058 #if defined(PETSC_USE_DEBUG)
1059   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
1060   /* count negative indices */
1061   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1062   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1063 #endif
1064   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1065   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1066 #if defined(PETSC_USE_DEBUG)
1067   /* count negative indices (should be the same as before) */
1068   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1069   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1070   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1071   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1072 #endif
1073   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1074   PetscFunctionReturn(0);
1075 }
1076 
1077 #undef __FUNCT__
1078 #define __FUNCT__ "MatSetValuesBlocked_IS"
1079 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1080 {
1081   Mat_IS         *is = (Mat_IS*)mat->data;
1082   PetscErrorCode ierr;
1083 #if defined(PETSC_USE_DEBUG)
1084   PetscInt       i,zm,zn;
1085 #endif
1086   PetscInt       rows_l[2048],cols_l[2048];
1087 
1088   PetscFunctionBegin;
1089 #if defined(PETSC_USE_DEBUG)
1090   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
1091   /* count negative indices */
1092   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1093   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1094 #endif
1095   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1096   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1097 #if defined(PETSC_USE_DEBUG)
1098   /* count negative indices (should be the same as before) */
1099   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1100   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1101   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1102   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1103 #endif
1104   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1105   PetscFunctionReturn(0);
1106 }
1107 
1108 #undef __FUNCT__
1109 #define __FUNCT__ "MatSetValuesLocal_IS"
1110 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1111 {
1112   PetscErrorCode ierr;
1113   Mat_IS         *is = (Mat_IS*)A->data;
1114 
1115   PetscFunctionBegin;
1116   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1117   PetscFunctionReturn(0);
1118 }
1119 
1120 #undef __FUNCT__
1121 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1122 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1123 {
1124   PetscErrorCode ierr;
1125   Mat_IS         *is = (Mat_IS*)A->data;
1126 
1127   PetscFunctionBegin;
1128   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1129   PetscFunctionReturn(0);
1130 }
1131 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/* Zeroes rows given by global indices: the locally owned ones are used to fix
   the right hand side, then the zeroed-row marks are broadcast through the SF
   so that every subdomain zeroes the corresponding rows of its local matrix. */
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    /* on each zeroed row set b_i = diag*x_i, consistent with the new matrix row */
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  /* mark the owned zeroed rows at the SF roots and broadcast the marks to the
     leaves, i.e. to the local numbering of each subdomain */
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the marked local row indices and zero them in the subdomain matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1172 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsLocal_Private"
/* Zeroes the given rows (subdomain-local numbering) of the local matrix. When
   diag is nonzero, each zeroed row gets a diagonal entry; since an interface
   row is shared by several subdomains, diag is divided by the multiplicity of
   the row (computed into is->y below) so that the contributions of all
   subdomains sum to exactly diag in the implicitly assembled operator. */
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (diag != 0.) {
    /*
       Set up is->y as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    /* the reverse scatter sums the per-subdomain 1's, i.e. counts row multiplicities */
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* bring the counts back to the subdomain-local numbering */
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }

  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        /* scale by the inverse multiplicity so the assembled diagonal equals diag */
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1217 
1218 #undef __FUNCT__
1219 #define __FUNCT__ "MatAssemblyBegin_IS"
1220 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1221 {
1222   Mat_IS         *is = (Mat_IS*)A->data;
1223   PetscErrorCode ierr;
1224 
1225   PetscFunctionBegin;
1226   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1227   PetscFunctionReturn(0);
1228 }
1229 
1230 #undef __FUNCT__
1231 #define __FUNCT__ "MatAssemblyEnd_IS"
1232 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1233 {
1234   Mat_IS         *is = (Mat_IS*)A->data;
1235   PetscErrorCode ierr;
1236 
1237   PetscFunctionBegin;
1238   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1239   PetscFunctionReturn(0);
1240 }
1241 
1242 #undef __FUNCT__
1243 #define __FUNCT__ "MatISGetLocalMat_IS"
1244 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1245 {
1246   Mat_IS *is = (Mat_IS*)mat->data;
1247 
1248   PetscFunctionBegin;
1249   *local = is->A;
1250   PetscFunctionReturn(0);
1251 }
1252 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    No reference is taken on the returned matrix; the caller should not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1283 
1284 #undef __FUNCT__
1285 #define __FUNCT__ "MatISSetLocalMat_IS"
1286 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1287 {
1288   Mat_IS         *is = (Mat_IS*)mat->data;
1289   PetscInt       nrows,ncols,orows,ocols;
1290   PetscErrorCode ierr;
1291 
1292   PetscFunctionBegin;
1293   if (is->A) {
1294     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1295     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1296     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
1297   }
1298   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1299   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1300   is->A = local;
1301   PetscFunctionReturn(0);
1302 }
1303 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Output Parameter:

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1333 
1334 #undef __FUNCT__
1335 #define __FUNCT__ "MatZeroEntries_IS"
1336 static PetscErrorCode MatZeroEntries_IS(Mat A)
1337 {
1338   Mat_IS         *a = (Mat_IS*)A->data;
1339   PetscErrorCode ierr;
1340 
1341   PetscFunctionBegin;
1342   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "MatScale_IS"
1348 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1349 {
1350   Mat_IS         *is = (Mat_IS*)A->data;
1351   PetscErrorCode ierr;
1352 
1353   PetscFunctionBegin;
1354   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1355   PetscFunctionReturn(0);
1356 }
1357 
1358 #undef __FUNCT__
1359 #define __FUNCT__ "MatGetDiagonal_IS"
1360 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1361 {
1362   Mat_IS         *is = (Mat_IS*)A->data;
1363   PetscErrorCode ierr;
1364 
1365   PetscFunctionBegin;
1366   /* get diagonal of the local matrix */
1367   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1368 
1369   /* scatter diagonal back into global vector */
1370   ierr = VecSet(v,0);CHKERRQ(ierr);
1371   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1372   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 #undef __FUNCT__
1377 #define __FUNCT__ "MatSetOption_IS"
1378 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1379 {
1380   Mat_IS         *a = (Mat_IS*)A->data;
1381   PetscErrorCode ierr;
1382 
1383   PetscFunctionBegin;
1384   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1385   PetscFunctionReturn(0);
1386 }
1387 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap is NULL, then the matrix is assumed to be square.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  /* a missing mapping defaults to the other one (square matrix assumption) */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1432 
1433 /*MC
1434    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1435    This stores the matrices in globally unassembled form. Each processor
1436    assembles only its local Neumann problem and the parallel matrix vector
1437    product is handled "implicitly".
1438 
1439    Operations Provided:
1440 +  MatMult()
1441 .  MatMultAdd()
1442 .  MatMultTranspose()
1443 .  MatMultTransposeAdd()
1444 .  MatZeroEntries()
1445 .  MatSetOption()
1446 .  MatZeroRows()
1447 .  MatSetValues()
1448 .  MatSetValuesBlocked()
1449 .  MatSetValuesLocal()
1450 .  MatSetValuesBlockedLocal()
1451 .  MatScale()
1452 .  MatGetDiagonal()
1453 .  MatMissingDiagonal()
1454 .  MatDuplicate()
1455 .  MatCopy()
1456 -  MatSetLocalToGlobalMapping()
1457 
1458    Options Database Keys:
1459 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1460 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1462 
1463           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1464 
1465           You can do matrix preallocation on the local matrix after you obtain it with
1466           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1467 
1468   Level: advanced
1469 
1470 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC, MatCreateIS()
1471 
1472 M*/
1473 
1474 #undef __FUNCT__
1475 #define __FUNCT__ "MatCreate_IS"
1476 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1477 {
1478   PetscErrorCode ierr;
1479   Mat_IS         *b;
1480 
1481   PetscFunctionBegin;
1482   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1483   A->data = (void*)b;
1484 
1485   /* matrix ops */
1486   ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1487   A->ops->mult                    = MatMult_IS;
1488   A->ops->multadd                 = MatMultAdd_IS;
1489   A->ops->multtranspose           = MatMultTranspose_IS;
1490   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1491   A->ops->destroy                 = MatDestroy_IS;
1492   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1493   A->ops->setvalues               = MatSetValues_IS;
1494   A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
1495   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1496   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1497   A->ops->zerorows                = MatZeroRows_IS;
1498   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1499   A->ops->assemblyend             = MatAssemblyEnd_IS;
1500   A->ops->view                    = MatView_IS;
1501   A->ops->zeroentries             = MatZeroEntries_IS;
1502   A->ops->scale                   = MatScale_IS;
1503   A->ops->getdiagonal             = MatGetDiagonal_IS;
1504   A->ops->setoption               = MatSetOption_IS;
1505   A->ops->ishermitian             = MatIsHermitian_IS;
1506   A->ops->issymmetric             = MatIsSymmetric_IS;
1507   A->ops->duplicate               = MatDuplicate_IS;
1508   A->ops->missingdiagonal         = MatMissingDiagonal_IS;
1509   A->ops->copy                    = MatCopy_IS;
1510   A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
1511   A->ops->getsubmatrix            = MatGetSubMatrix_IS;
1512 
1513   /* special MATIS functions */
1514   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
1515   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
1516   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
1517   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
1518   ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
1519   PetscFunctionReturn(0);
1520 }
1521