xref: /petsc/src/mat/impls/is/matis.c (revision 314ce898bacf8328c7126c252303a448b90aeccb)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 
12 #define MATIS_MAX_ENTRIES_INSERTION 2048
13 
14 static PetscErrorCode MatISComputeSF_Private(Mat);
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "MatGetInfo_IS"
18 static PetscErrorCode MatGetInfo_IS(Mat A,MatInfoType flag,MatInfo *ginfo)
19 {
20   Mat_IS         *matis = (Mat_IS*)A->data;
21   MatInfo        info;
22   PetscReal      isend[6],irecv[6];
23   PetscInt       bs;
24   PetscErrorCode ierr;
25 
26   PetscFunctionBegin;
27   ierr     = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
28   ierr     = MatGetInfo(matis->A,MAT_LOCAL,&info);CHKERRQ(ierr);
29   isend[0] = info.nz_used;
30   isend[1] = info.nz_allocated;
31   isend[2] = info.nz_unneeded;
32   isend[3] = info.memory;
33   isend[4] = info.mallocs;
34   isend[5] = matis->A->num_ass;
35   if (flag == MAT_LOCAL) {
36     ginfo->nz_used      = isend[0];
37     ginfo->nz_allocated = isend[1];
38     ginfo->nz_unneeded  = isend[2];
39     ginfo->memory       = isend[3];
40     ginfo->mallocs      = isend[4];
41     ginfo->assemblies   = isend[5];
42   } else if (flag == MAT_GLOBAL_MAX) {
43     ierr = MPIU_Allreduce(isend,irecv,6,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
44 
45     ginfo->nz_used      = irecv[0];
46     ginfo->nz_allocated = irecv[1];
47     ginfo->nz_unneeded  = irecv[2];
48     ginfo->memory       = irecv[3];
49     ginfo->mallocs      = irecv[4];
50     ginfo->assemblies   = irecv[5];
51   } else if (flag == MAT_GLOBAL_SUM) {
52     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
53 
54     ginfo->nz_used      = irecv[0];
55     ginfo->nz_allocated = irecv[1];
56     ginfo->nz_unneeded  = irecv[2];
57     ginfo->memory       = irecv[3];
58     ginfo->mallocs      = irecv[4];
59     ginfo->assemblies   = A->num_ass;
60   }
61   ginfo->block_size        = bs;
62   ginfo->fill_ratio_given  = 0;
63   ginfo->fill_ratio_needed = 0;
64   ginfo->factor_mallocs    = 0;
65   PetscFunctionReturn(0);
66 }
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "MatTranspose_IS"
70 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B)
71 {
72   Mat                    C,lC,lA;
73   ISLocalToGlobalMapping rl2g,cl2g;
74   PetscBool              notset = PETSC_FALSE,cong = PETSC_TRUE;
75   PetscErrorCode         ierr;
76 
77   PetscFunctionBegin;
78   if (reuse == MAT_REUSE_MATRIX) {
79     PetscBool rcong,ccong;
80 
81     ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr);
82     ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr);
83     cong = (PetscBool)(rcong || ccong);
84   }
85   if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) {
86     ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
87   } else {
88     ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,&notset);CHKERRQ(ierr);
89     C = *B;
90   }
91 
92   if (!notset) {
93     ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr);
94     ierr = MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr);
95     ierr = MatSetType(C,MATIS);CHKERRQ(ierr);
96     ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr);
97     ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr);
98   }
99 
100   /* perform local transposition */
101   ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
102   ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr);
103   ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr);
104   ierr = MatDestroy(&lC);CHKERRQ(ierr);
105 
106   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
107   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
108 
109   if (reuse == MAT_INITIAL_MATRIX || *B != A) {
110     if (!cong) {
111       ierr = MatDestroy(B);CHKERRQ(ierr);
112     }
113     *B = C;
114   } else {
115     ierr = MatHeaderMerge(A,&C);CHKERRQ(ierr);
116   }
117   PetscFunctionReturn(0);
118 }
119 
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalSet_IS"
/*
   MatDiagonalSet for MATIS: sets (or adds) the entries of the global vector D
   on the diagonal of the unassembled operator.

   A diagonal entry shared by several subdomains is implicitly summed during
   the parallel matrix-vector product, so the scattered values are divided by
   is->counter (presumably the per-dof multiplicity, i.e. the number of
   subdomains sharing each local dof -- verify in matis.h) before being set on
   the local matrix.  A NULL D is treated as the all-ones vector, i.e. a unit
   shift of the operator.
*/
PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!D) { /* this code branch is used by MatShift_IS */
    /* NULL D: use the identity; fill the local work vector with ones */
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
  } else {
    /* scatter the global diagonal to the overlapping local layout */
    ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  /* divide by the multiplicity so contributions sum correctly across subdomains */
  ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr);
  ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "MatShift_IS"
141 PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
142 {
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr);
147   PetscFunctionReturn(0);
148 }
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
152 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
153 {
154   PetscErrorCode ierr;
155   Mat_IS         *is = (Mat_IS*)A->data;
156   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
157 
158   PetscFunctionBegin;
159 #if defined(PETSC_USE_DEBUG)
160   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
161 #endif
162   ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
163   ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
164   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
165   PetscFunctionReturn(0);
166 }
167 
168 #undef __FUNCT__
169 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
170 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
171 {
172   PetscErrorCode ierr;
173   Mat_IS         *is = (Mat_IS*)A->data;
174   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
175 
176   PetscFunctionBegin;
177 #if defined(PETSC_USE_DEBUG)
178   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
179 #endif
180   ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
181   ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
182   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
183   PetscFunctionReturn(0);
184 }
185 
#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/*
   Given N global indices idxs[] on each process (referring to a PetscLayout),
   determines which locally owned layout entries are referenced by any process.

   Output (all optional):
   - on:     number of owned entries referenced
   - oidxs:  those entries, in local numbering [0,map->n); caller must PetscFree
   - ogidxs: for each entry of oidxs, the position the referencing process
             assigned to it in a cumulative (MPI_Scan based) numbering of the
             valid entries of idxs across the communicator; caller must PetscFree

   Negative or out-of-range entries of idxs raise an error.
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks an owned entry that is not referenced by anyone */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* mark referenced roots: after the MPI_LOR reduction, lidxs[r] >= 0 exactly
     for the owned entries touched by some leaf (untouched roots keep -1) */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* number the valid local entries consecutively across the communicator:
       an exclusive prefix sum gives this process' starting offset */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    /* push each leaf's cumulative number to the owner of the index */
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  /* in-place compaction is safe because len <= r throughout */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
244 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   Extracts the parallel submatrix defined by the global index sets irow/icol
   from a MATIS matrix, keeping the result in unassembled (MATIS) form.

   The requested global indices are first communicated to their owners in the
   layout (PetscLayoutMapLocal_Private), then broadcast to the subdomains
   through the matrix SF; the entries selected on each subdomain are cached as
   local index sets (getsub_ris/getsub_cis) and used to extract the local
   submatrix with MatGetSubMatrix on the local matrix.

   Notes:
   - irow and icol may not contain duplicate entries (checked in debug builds).
   - With MAT_REUSE_MATRIX, *newmat must be a MATIS previously produced by this
     routine, since the cached getsub_ris/getsub_cis index sets are required.
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* debug check: add 1 for each requested index into a global vector; any
     assembled entry other than 0 or 1 reveals a duplicate in the index set */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  /* same duplicate check for the column index set */
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* rootdata holds, for each selected owned row, its new global index + 1
       (0 marks "not selected"); the SF broadcast propagates the selection to
       the overlapping local numbering of every subdomain */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    /* count, then collect, the selected local rows and their new global ids */
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1;
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* symmetric extraction: reuse the row mapping and row IS for the columns */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* same selection/broadcast scheme as above, through the column SF */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    /* reuse path: fetch the existing local submatrix to fill it in place */
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
396 
397 #undef __FUNCT__
398 #define __FUNCT__ "MatCopy_IS"
399 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
400 {
401   Mat_IS         *a = (Mat_IS*)A->data,*b;
402   PetscBool      ismatis;
403   PetscErrorCode ierr;
404 
405   PetscFunctionBegin;
406   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
407   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
408   b = (Mat_IS*)B->data;
409   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
410   PetscFunctionReturn(0);
411 }
412 
413 #undef __FUNCT__
414 #define __FUNCT__ "MatMissingDiagonal_IS"
415 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
416 {
417   Vec               v;
418   const PetscScalar *array;
419   PetscInt          i,n;
420   PetscErrorCode    ierr;
421 
422   PetscFunctionBegin;
423   *missing = PETSC_FALSE;
424   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
425   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
426   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
427   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
428   for (i=0;i<n;i++) if (array[i] == 0.) break;
429   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
430   ierr = VecDestroy(&v);CHKERRQ(ierr);
431   if (i != n) *missing = PETSC_TRUE;
432   if (d) {
433     *d = -1;
434     if (*missing) {
435       PetscInt rstart;
436       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
437       *d = i+rstart;
438     }
439   }
440   PetscFunctionReturn(0);
441 }
442 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forest used to exchange data between the global row space
   of B (roots: locally owned rows) and the row space of the local matrix
   (leaves: one per local row, connected through the row l2g mapping), and
   allocates the companion rootdata/leafdata integer work arrays.

   When the column mapping differs from the row one, a second SF (csf) is
   built for the columns; otherwise csf and its buffers simply alias the row
   ones.  NOTE(review): because of this aliasing, the destroy path presumably
   guards against freeing the shared SF/buffers twice -- verify there.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all rows of the local matrix; roots: locally owned global rows */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* identical row/column mappings: share the row SF and its work buffers */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
476 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation, if one is registered */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
524 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   Implementation of MatISSetPreallocation: converts the MPIAIJ-style
   diagonal/off-diagonal row counts, given on the global layout, into per-row
   counts for the local (unassembled) matrix by broadcasting d_nnz+o_nnz
   through the matrix SF.  The leafdata buffer is then transformed IN PLACE
   and passed to the AIJ, BAIJ and SBAIJ preallocation routines in turn
   (presumably each is a no-op when matis->A is not of that type -- standard
   PETSc PetscTryMethod dispatch, verify).  The order of the in-place
   transformations below is therefore significant.
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total (diagonal + off-diagonal) estimate for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the SF broadcast with local queries on the local matrix */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot hold more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress scalar counts to block counts for BAIJ (in place: safe since i <= i*bs) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract the block row index as an
     estimate of the entries below the diagonal */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
569 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
570 {
571   Mat_IS          *matis = (Mat_IS*)(A->data);
572   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
573   const PetscInt  *global_indices_r,*global_indices_c;
574   PetscInt        i,j,bs,rows,cols;
575   PetscInt        lrows,lcols;
576   PetscInt        local_rows,local_cols;
577   PetscMPIInt     nsubdomains;
578   PetscBool       isdense,issbaij;
579   PetscErrorCode  ierr;
580 
581   PetscFunctionBegin;
582   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
583   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
584   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
585   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
586   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
587   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
588   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
589   if (A->rmap->mapping != A->cmap->mapping) {
590     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
591   } else {
592     global_indices_c = global_indices_r;
593   }
594 
595   if (issbaij) {
596     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
597   }
598   /*
599      An SF reduce is needed to sum up properly on shared rows.
600      Note that generally preallocation is not exact, since it overestimates nonzeros
601   */
602   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
603     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
604   }
605   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
606   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
607   /* All processes need to compute entire row ownership */
608   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
609   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
610   for (i=0;i<nsubdomains;i++) {
611     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
612       row_ownership[j] = i;
613     }
614   }
615   ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
616 
617   /*
618      my_dnz and my_onz contains exact contribution to preallocation from each local mat
619      then, they will be summed up properly. This way, preallocation is always sufficient
620   */
621   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
622   /* preallocation as a MATAIJ */
623   if (isdense) { /* special case for dense local matrices */
624     for (i=0;i<local_rows;i++) {
625       PetscInt index_row = global_indices_r[i];
626       for (j=i;j<local_rows;j++) {
627         PetscInt owner = row_ownership[index_row];
628         PetscInt index_col = global_indices_c[j];
629         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
630           my_dnz[i] += 1;
631         } else { /* offdiag block */
632           my_onz[i] += 1;
633         }
634         /* same as before, interchanging rows and cols */
635         if (i != j) {
636           owner = row_ownership[index_col];
637           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
638             my_dnz[j] += 1;
639           } else {
640             my_onz[j] += 1;
641           }
642         }
643       }
644     }
645   } else if (matis->A->ops->getrowij) {
646     const PetscInt *ii,*jj,*jptr;
647     PetscBool      done;
648     ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
649     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
650     jptr = jj;
651     for (i=0;i<local_rows;i++) {
652       PetscInt index_row = global_indices_r[i];
653       for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
654         PetscInt owner = row_ownership[index_row];
655         PetscInt index_col = global_indices_c[*jptr];
656         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
657           my_dnz[i] += 1;
658         } else { /* offdiag block */
659           my_onz[i] += 1;
660         }
661         /* same as before, interchanging rows and cols */
662         if (issbaij && index_col != index_row) {
663           owner = row_ownership[index_col];
664           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
665             my_dnz[*jptr] += 1;
666           } else {
667             my_onz[*jptr] += 1;
668           }
669         }
670       }
671     }
672     ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
673     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
674   } else { /* loop over rows and use MatGetRow */
675     for (i=0;i<local_rows;i++) {
676       const PetscInt *cols;
677       PetscInt       ncols,index_row = global_indices_r[i];
678       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
679       for (j=0;j<ncols;j++) {
680         PetscInt owner = row_ownership[index_row];
681         PetscInt index_col = global_indices_c[cols[j]];
682         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
683           my_dnz[i] += 1;
684         } else { /* offdiag block */
685           my_onz[i] += 1;
686         }
687         /* same as before, interchanging rows and cols */
688         if (issbaij && index_col != index_row) {
689           owner = row_ownership[index_col];
690           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
691             my_dnz[cols[j]] += 1;
692           } else {
693             my_onz[cols[j]] += 1;
694           }
695         }
696       }
697       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
698     }
699   }
700   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
701   if (global_indices_c != global_indices_r) {
702     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
703   }
704   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
705 
706   /* Reduce my_dnz and my_onz */
707   if (maxreduce) {
708     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
709     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
710     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
711     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
712   } else {
713     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
714     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
715     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
716     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
717   }
718   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
719 
720   /* Resize preallocation if overestimated */
721   for (i=0;i<lrows;i++) {
722     dnz[i] = PetscMin(dnz[i],lcols);
723     onz[i] = PetscMin(onz[i],cols-lcols);
724   }
725   /* set preallocation */
726   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
727   for (i=0;i<lrows/bs;i++) {
728     dnz[i] = dnz[i*bs]/bs;
729     onz[i] = onz[i*bs]/bs;
730   }
731   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
732   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
733   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
734   if (issbaij) {
735     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
736   }
737   PetscFunctionReturn(0);
738 }
739 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MatISGetMPIXAIJ_IS - Assembles the globally unassembled MATIS matrix into a
   parallel AIJ/BAIJ/SBAIJ matrix by summing the local (subdomain) contributions.

   With MAT_INITIAL_MATRIX a new matrix of the appropriate type is created and
   preallocated; with MAT_REUSE_MATRIX the sizes/block size of *M are checked
   against mat and its entries are zeroed before reassembly.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* single subdomain: the local matrix is already the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every process has an SBAIJ
       local matrix, otherwise BAIJ (bs>1) or AIJ */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat's global/local sizes and block size */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle; convert so the full rows can be inserted */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      /* square: share the same identity index array for rows and columns */
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense storage is column-major, hence the column-oriented insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* AIJ: insert row by row directly from the CSR arrays, no copies */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* destroy our reference to the local matrix between Begin/End to overlap with communication */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
873 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, the output must be a valid matrix distinct from mat and on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
908 
909 #undef __FUNCT__
910 #define __FUNCT__ "MatDuplicate_IS"
911 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
912 {
913   PetscErrorCode ierr;
914   Mat_IS         *matis = (Mat_IS*)(mat->data);
915   PetscInt       bs,m,n,M,N;
916   Mat            B,localmat;
917 
918   PetscFunctionBegin;
919   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
920   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
921   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
922   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
923   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
924   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
925   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
926   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
927   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
928   *newmat = B;
929   PetscFunctionReturn(0);
930 }
931 
932 #undef __FUNCT__
933 #define __FUNCT__ "MatIsHermitian_IS"
934 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
935 {
936   PetscErrorCode ierr;
937   Mat_IS         *matis = (Mat_IS*)A->data;
938   PetscBool      local_sym;
939 
940   PetscFunctionBegin;
941   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
942   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
946 #undef __FUNCT__
947 #define __FUNCT__ "MatIsSymmetric_IS"
948 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
949 {
950   PetscErrorCode ierr;
951   Mat_IS         *matis = (Mat_IS*)A->data;
952   PetscBool      local_sym;
953 
954   PetscFunctionBegin;
955   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
956   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Frees all data owned by the MATIS object (local matrix, scatters, work
   vectors, counter vector, cached index sets, star forests and their buffers)
   and detaches the composed methods from the PETSc object. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* destroy the column SF and its buffers only when it is a distinct object;
     when csf aliases sf, freeing it here would be a double free */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  /* reset type name and remove the composed query functions */
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
990 
991 #undef __FUNCT__
992 #define __FUNCT__ "MatMult_IS"
993 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
994 {
995   PetscErrorCode ierr;
996   Mat_IS         *is  = (Mat_IS*)A->data;
997   PetscScalar    zero = 0.0;
998 
999   PetscFunctionBegin;
1000   /*  scatter the global vector x into the local work vector */
1001   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1002   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1003 
1004   /* multiply the local matrix */
1005   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
1006 
1007   /* scatter product back into global memory */
1008   ierr = VecSet(y,zero);CHKERRQ(ierr);
1009   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1010   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1011   PetscFunctionReturn(0);
1012 }
1013 
1014 #undef __FUNCT__
1015 #define __FUNCT__ "MatMultAdd_IS"
1016 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1017 {
1018   Vec            temp_vec;
1019   PetscErrorCode ierr;
1020 
1021   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
1022   if (v3 != v2) {
1023     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
1024     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1025   } else {
1026     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1027     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
1028     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1029     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1030     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1031   }
1032   PetscFunctionReturn(0);
1033 }
1034 
1035 #undef __FUNCT__
1036 #define __FUNCT__ "MatMultTranspose_IS"
1037 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
1038 {
1039   Mat_IS         *is = (Mat_IS*)A->data;
1040   PetscErrorCode ierr;
1041 
1042   PetscFunctionBegin;
1043   /*  scatter the global vector x into the local work vector */
1044   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1045   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1046 
1047   /* multiply the local matrix */
1048   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
1049 
1050   /* scatter product back into global vector */
1051   ierr = VecSet(x,0);CHKERRQ(ierr);
1052   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1053   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1054   PetscFunctionReturn(0);
1055 }
1056 
1057 #undef __FUNCT__
1058 #define __FUNCT__ "MatMultTransposeAdd_IS"
1059 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1060 {
1061   Vec            temp_vec;
1062   PetscErrorCode ierr;
1063 
1064   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
1065   if (v3 != v2) {
1066     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
1067     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1068   } else {
1069     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1070     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
1071     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1072     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1073     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1074   }
1075   PetscFunctionReturn(0);
1076 }
1077 
1078 #undef __FUNCT__
1079 #define __FUNCT__ "MatView_IS"
1080 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
1081 {
1082   Mat_IS         *a = (Mat_IS*)A->data;
1083   PetscErrorCode ierr;
1084   PetscViewer    sviewer;
1085 
1086   PetscFunctionBegin;
1087   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1088   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
1089   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1090   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1091   PetscFunctionReturn(0);
1092 }
1093 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Installs the row/column local-to-global
   mappings on a MATIS matrix, (re)creating the local matrix, the
   global-to-local scatters and the interface counter vector.

   Any previously existing local matrix, scatters and work vectors are destroyed.
*/
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential matrix whose sizes are the sizes
     of the mappings; default type is AIJ, overridable via the "is_" prefix */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters: local index i <-> global index rmapping(i) */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same mapping for rows and columns: share the row scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): scatter ones reverse-add then forward,
       so each local entry ends up holding the number of subdomains sharing it */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "MatSetValues_IS"
1176 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1177 {
1178   Mat_IS         *is = (Mat_IS*)mat->data;
1179   PetscErrorCode ierr;
1180 #if defined(PETSC_USE_DEBUG)
1181   PetscInt       i,zm,zn;
1182 #endif
1183   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1184 
1185   PetscFunctionBegin;
1186 #if defined(PETSC_USE_DEBUG)
1187   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1188   /* count negative indices */
1189   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1190   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1191 #endif
1192   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1193   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1194 #if defined(PETSC_USE_DEBUG)
1195   /* count negative indices (should be the same as before) */
1196   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1197   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1198   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1199   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1200 #endif
1201   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1202   PetscFunctionReturn(0);
1203 }
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "MatSetValuesBlocked_IS"
1207 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1208 {
1209   Mat_IS         *is = (Mat_IS*)mat->data;
1210   PetscErrorCode ierr;
1211 #if defined(PETSC_USE_DEBUG)
1212   PetscInt       i,zm,zn;
1213 #endif
1214   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1215 
1216   PetscFunctionBegin;
1217 #if defined(PETSC_USE_DEBUG)
1218   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1219   /* count negative indices */
1220   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1221   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1222 #endif
1223   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1224   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1225 #if defined(PETSC_USE_DEBUG)
1226   /* count negative indices (should be the same as before) */
1227   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1228   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1229   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1230   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1231 #endif
1232   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1233   PetscFunctionReturn(0);
1234 }
1235 
1236 #undef __FUNCT__
1237 #define __FUNCT__ "MatSetValuesLocal_IS"
1238 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1239 {
1240   PetscErrorCode ierr;
1241   Mat_IS         *is = (Mat_IS*)A->data;
1242 
1243   PetscFunctionBegin;
1244   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1245   PetscFunctionReturn(0);
1246 }
1247 
1248 #undef __FUNCT__
1249 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1250 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1251 {
1252   PetscErrorCode ierr;
1253   Mat_IS         *is = (Mat_IS*)A->data;
1254 
1255   PetscFunctionBegin;
1256   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 #undef __FUNCT__
1261 #define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
1262 static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
1263 {
1264   Mat_IS         *is = (Mat_IS*)A->data;
1265   PetscErrorCode ierr;
1266 
1267   PetscFunctionBegin;
1268   if (!n) {
1269     is->pure_neumann = PETSC_TRUE;
1270   } else {
1271     PetscInt i;
1272     is->pure_neumann = PETSC_FALSE;
1273 
1274     if (columns) {
1275       ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
1276     } else {
1277       ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
1278     }
1279     if (diag != 0.) {
1280       const PetscScalar *array;
1281       ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
1282       for (i=0; i<n; i++) {
1283         ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
1284       }
1285       ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
1286     }
1287     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1288     ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1289   }
1290   PetscFunctionReturn(0);
1291 }
1292 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/*
   MatZeroRowsColumns_Private_IS - Common implementation for MatZeroRows_IS and
   MatZeroRowsColumns_IS.

   The globally-indexed rows are mapped to the locally owned ones, the right
   hand side is fixed if x and b are given, then the selection is broadcast
   through the star forest so each subdomain zeroes the corresponding rows of
   its local matrix.
*/
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* zeroing columns, a nonzero diagonal, or rhs fixing all assume square layouts */
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed: b = diag*x on the zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  /* mark the owned rows to zero at the roots and broadcast to the leaves (local dofs) */
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the marked local row indices and zero them on the local matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1342 
1343 #undef __FUNCT__
1344 #define __FUNCT__ "MatZeroRows_IS"
1345 static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1346 {
1347   PetscErrorCode ierr;
1348 
1349   PetscFunctionBegin;
1350   ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
1351   PetscFunctionReturn(0);
1352 }
1353 
1354 #undef __FUNCT__
1355 #define __FUNCT__ "MatZeroRowsColumns_IS"
1356 static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1357 {
1358   PetscErrorCode ierr;
1359 
1360   PetscFunctionBegin;
1361   ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
1362   PetscFunctionReturn(0);
1363 }
1364 
1365 #undef __FUNCT__
1366 #define __FUNCT__ "MatAssemblyBegin_IS"
1367 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1368 {
1369   Mat_IS         *is = (Mat_IS*)A->data;
1370   PetscErrorCode ierr;
1371 
1372   PetscFunctionBegin;
1373   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 #undef __FUNCT__
1378 #define __FUNCT__ "MatAssemblyEnd_IS"
1379 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1380 {
1381   Mat_IS         *is = (Mat_IS*)A->data;
1382   PetscErrorCode ierr;
1383 
1384   PetscFunctionBegin;
1385   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1386   PetscFunctionReturn(0);
1387 }
1388 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Accessor backing MatISGetLocalMat(): returns the inner local matrix.
   No reference is taken on the returned object, so the caller must not
   destroy it. */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}
1399 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    No reference is taken on the returned matrix; the caller should not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1430 
1431 #undef __FUNCT__
1432 #define __FUNCT__ "MatISSetLocalMat_IS"
1433 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1434 {
1435   Mat_IS         *is = (Mat_IS*)mat->data;
1436   PetscInt       nrows,ncols,orows,ocols;
1437   PetscErrorCode ierr;
1438 
1439   PetscFunctionBegin;
1440   if (is->A) {
1441     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1442     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1443     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
1444   }
1445   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1446   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1447   is->A = local;
1448   PetscFunctionReturn(0);
1449 }
1450 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1480 
#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroes all entries of a MATIS matrix by zeroing the local matrix. */
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1492 
#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scales a MATIS matrix by a: delegates to the local matrix (scaling every
   local contribution scales the implicitly assembled operator). */
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1504 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Extracts the diagonal of the implicitly assembled operator: the diagonal of
   the local matrix is placed in the work vector is->y and then accumulated
   into the global vector v with ADD_VALUES, so contributions for global
   indices present in more than one local space are summed. */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector; v is zeroed first since the
     reverse scatter adds into it */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1522 
#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Forwards a matrix option to the local matrix of a MATIS object. */
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1534 
1535 #undef __FUNCT__
1536 #define __FUNCT__ "MatAXPY_IS"
1537 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1538 {
1539   Mat_IS         *y = (Mat_IS*)Y->data;
1540   Mat_IS         *x;
1541 #if defined(PETSC_USE_DEBUG)
1542   PetscBool      ismatis;
1543 #endif
1544   PetscErrorCode ierr;
1545 
1546   PetscFunctionBegin;
1547 #if defined(PETSC_USE_DEBUG)
1548   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1549   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1550 #endif
1551   x = (Mat_IS*)X->data;
1552   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1553   PetscFunctionReturn(0);
1554 }
1555 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Returns a MATIS "local reference" submatrix for the local index sets
   row/col. The submatrix shares the local matrix of A (no copy) and gets new
   local-to-global mappings that compose the row/col selections with A's
   mappings; its operation table is then cleared so that only set-values,
   assembly and destroy are supported on the result. */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the unused tail [nrl,nrg): debug builds use the out-of-range value
     nrg so that accidental use of padding entries is detectable; optimized
     builds use -1 (NOTE(review): negative entries are presumably dropped
     when the mapping is applied -- confirm against ISLocalToGlobalMapping) */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  /* idxs ownership is transferred to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
    /* same padding scheme as for the rows above */
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* same selection and same mapping: reuse the row map for the columns */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  /* share the local matrix of A with the submatrix */
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}
1644 
1645 #undef __FUNCT__
1646 #define __FUNCT__ "MatCreateIS"
1647 /*@
1648     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1649        process but not across processes.
1650 
1651    Input Parameters:
1652 +     comm    - MPI communicator that will share the matrix
1653 .     bs      - block size of the matrix
1654 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1655 .     rmap    - local to global map for rows
1656 -     cmap    - local to global map for cols
1657 
1658    Output Parameter:
1659 .    A - the resulting matrix
1660 
1661    Level: advanced
1662 
1663    Notes: See MATIS for more details.
1664           m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
1665           used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
1666           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1667 
1668 .seealso: MATIS, MatSetLocalToGlobalMapping()
1669 @*/
1670 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1671 {
1672   PetscErrorCode ierr;
1673 
1674   PetscFunctionBegin;
1675   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings");
1676   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1677   if (bs > 0) {
1678     ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1679   }
1680   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1681   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1682   if (rmap && cmap) {
1683     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1684   } else if (!rmap) {
1685     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1686   } else {
1687     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1688   }
1689   PetscFunctionReturn(0);
1690 }
1691 
1692 /*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1694    This stores the matrices in globally unassembled form. Each processor
1695    assembles only its local Neumann problem and the parallel matrix vector
1696    product is handled "implicitly".
1697 
1698    Operations Provided:
1699 +  MatMult()
1700 .  MatMultAdd()
1701 .  MatMultTranspose()
1702 .  MatMultTransposeAdd()
1703 .  MatZeroEntries()
1704 .  MatSetOption()
1705 .  MatZeroRows()
1706 .  MatSetValues()
1707 .  MatSetValuesBlocked()
1708 .  MatSetValuesLocal()
1709 .  MatSetValuesBlockedLocal()
1710 .  MatScale()
1711 .  MatGetDiagonal()
1712 .  MatMissingDiagonal()
1713 .  MatDuplicate()
1714 .  MatCopy()
1715 .  MatAXPY()
1716 .  MatGetSubMatrix()
1717 .  MatGetLocalSubMatrix()
1718 .  MatTranspose()
1719 -  MatSetLocalToGlobalMapping()
1720 
1721    Options Database Keys:
1722 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1723 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1725 
1726           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1727 
1728           You can do matrix preallocation on the local matrix after you obtain it with
1729           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1730 
1731   Level: advanced
1732 
1733 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1734 
1735 M*/
1736 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS matrix type: allocates the Mat_IS data
   structure, installs the supported matrix operations and composes the
   MATIS-specific functions. Invoked by MatSetType()/MatCreateIS(). */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from an empty table so that every operation not
     explicitly set below is reported as unsupported */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;
  A->ops->getinfo                 = MatGetInfo_IS;

  /* special MATIS functions, retrievable via PetscObjectQueryFunction() */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1790