xref: /petsc/src/mat/impls/is/matis.c (revision a2ccb5f9ee22d16282353a7d1b8730ab22a81dc7)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 
12 #define MATIS_MAX_ENTRIES_INSERTION 2048
13 
14 static PetscErrorCode MatISComputeSF_Private(Mat);
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "MatGetInfo_IS"
18 static PetscErrorCode MatGetInfo_IS(Mat A,MatInfoType flag,MatInfo *ginfo)
19 {
20   Mat_IS         *matis = (Mat_IS*)A->data;
21   MatInfo        info;
22   PetscReal      isend[6],irecv[6];
23   PetscInt       bs;
24   PetscErrorCode ierr;
25 
26   PetscFunctionBegin;
27   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
28   if (matis->A->ops->getinfo) {
29     ierr     = MatGetInfo(matis->A,MAT_LOCAL,&info);CHKERRQ(ierr);
30     isend[0] = info.nz_used;
31     isend[1] = info.nz_allocated;
32     isend[2] = info.nz_unneeded;
33     isend[3] = info.memory;
34     isend[4] = info.mallocs;
35   } else {
36     isend[0] = 0.;
37     isend[1] = 0.;
38     isend[2] = 0.;
39     isend[3] = 0.;
40     isend[4] = 0.;
41   }
42   isend[5] = matis->A->num_ass;
43   if (flag == MAT_LOCAL) {
44     ginfo->nz_used      = isend[0];
45     ginfo->nz_allocated = isend[1];
46     ginfo->nz_unneeded  = isend[2];
47     ginfo->memory       = isend[3];
48     ginfo->mallocs      = isend[4];
49     ginfo->assemblies   = isend[5];
50   } else if (flag == MAT_GLOBAL_MAX) {
51     ierr = MPIU_Allreduce(isend,irecv,6,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
52 
53     ginfo->nz_used      = irecv[0];
54     ginfo->nz_allocated = irecv[1];
55     ginfo->nz_unneeded  = irecv[2];
56     ginfo->memory       = irecv[3];
57     ginfo->mallocs      = irecv[4];
58     ginfo->assemblies   = irecv[5];
59   } else if (flag == MAT_GLOBAL_SUM) {
60     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
61 
62     ginfo->nz_used      = irecv[0];
63     ginfo->nz_allocated = irecv[1];
64     ginfo->nz_unneeded  = irecv[2];
65     ginfo->memory       = irecv[3];
66     ginfo->mallocs      = irecv[4];
67     ginfo->assemblies   = A->num_ass;
68   }
69   ginfo->block_size        = bs;
70   ginfo->fill_ratio_given  = 0;
71   ginfo->fill_ratio_needed = 0;
72   ginfo->factor_mallocs    = 0;
73   PetscFunctionReturn(0);
74 }
75 
76 #undef __FUNCT__
77 #define __FUNCT__ "MatTranspose_IS"
78 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B)
79 {
80   Mat                    C,lC,lA;
81   ISLocalToGlobalMapping rl2g,cl2g;
82   PetscBool              notset = PETSC_FALSE,cong = PETSC_TRUE;
83   PetscErrorCode         ierr;
84 
85   PetscFunctionBegin;
86   if (reuse == MAT_REUSE_MATRIX) {
87     PetscBool rcong,ccong;
88 
89     ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr);
90     ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr);
91     cong = (PetscBool)(rcong || ccong);
92   }
93   if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) {
94     ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
95   } else {
96     ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,&notset);CHKERRQ(ierr);
97     C = *B;
98   }
99 
100   if (!notset) {
101     ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr);
102     ierr = MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr);
103     ierr = MatSetType(C,MATIS);CHKERRQ(ierr);
104     ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr);
105     ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr);
106   }
107 
108   /* perform local transposition */
109   ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
110   ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr);
111   ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr);
112   ierr = MatDestroy(&lC);CHKERRQ(ierr);
113 
114   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
115   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
116 
117   if (reuse == MAT_INITIAL_MATRIX || *B != A) {
118     if (!cong) {
119       ierr = MatDestroy(B);CHKERRQ(ierr);
120     }
121     *B = C;
122   } else {
123     ierr = MatHeaderMerge(A,&C);CHKERRQ(ierr);
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 #undef __FUNCT__
129 #define __FUNCT__ "MatDiagonalSet_IS"
130 PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
131 {
132   Mat_IS         *is = (Mat_IS*)A->data;
133   PetscErrorCode ierr;
134 
135   PetscFunctionBegin;
136   if (!D) { /* this code branch is used by MatShift_IS */
137     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
138   } else {
139     ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
140     ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
141   }
142   ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr);
143   ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr);
144   PetscFunctionReturn(0);
145 }
146 
147 #undef __FUNCT__
148 #define __FUNCT__ "MatShift_IS"
149 PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
150 {
151   PetscErrorCode ierr;
152 
153   PetscFunctionBegin;
154   ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr);
155   PetscFunctionReturn(0);
156 }
157 
158 #undef __FUNCT__
159 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
160 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
161 {
162   PetscErrorCode ierr;
163   Mat_IS         *is = (Mat_IS*)A->data;
164   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
165 
166   PetscFunctionBegin;
167 #if defined(PETSC_USE_DEBUG)
168   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
169 #endif
170   ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
171   ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
172   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
178 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
179 {
180   PetscErrorCode ierr;
181   Mat_IS         *is = (Mat_IS*)A->data;
182   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
183 
184   PetscFunctionBegin;
185 #if defined(PETSC_USE_DEBUG)
186   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
187 #endif
188   ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
189   ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
190   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
191   PetscFunctionReturn(0);
192 }
193 
#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/*
   Maps a list of N global indices (idxs), possibly owned by other processes of
   the layout 'map', back to their owning processes.

   Output arguments (each optional; pass NULL to skip):
     on     - number of input indices owned by this process
     oidxs  - their local positions in [0,map->n); ownership transferred to the
              caller, who must PetscFree() it
     ogidxs - a contiguous global renumbering of the selected indices, aligned
              with oidxs; ownership transferred to the caller (PetscFree)
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks "not referenced": any value reduced from a leaf is >= 0 */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  /* the SF takes ownership of ridxs (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* after the logical-or reduction, roots referenced by at least one leaf hold a value >= 0 */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* MPI_Scan followed by "start -= cum" yields an exclusive prefix sum:
       the first global number assigned to this process's selected entries */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r]; /* in-place compaction: len <= r always holds */
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
252 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   Extracts the submatrix of a MATIS matrix given by the (parallel) index sets
   irow and icol. The global selection is translated, via the matrix's star
   forest, into a selection of rows/columns of the local subdomain matrix; the
   actual extraction is then delegated to MatGetSubMatrix on the local matrix.

   The local selections are cached in getsub_ris/getsub_cis so that a
   MAT_REUSE_MATRIX call can skip the communication phase.

   Note: irow and icol must not contain duplicate entries (checked in debug builds).
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* reuse requires a MATIS matrix that carries the cached local index sets */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count how many times each global row/col is selected by adding 1 per index;
     any entry counted more than once signals a duplicate */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* mark selected owned rows with (new global index + 1); 0 means "not selected" */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    /* broadcast the selection to the subdomain (leaf) side */
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;                                /* local row in the subdomain matrix */
        newgidxs[newloc++] = matis->sf_leafdata[i]-1;     /* its global row in the submatrix (undo the +1) */
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* symmetric selection: reuse the row mapping and row IS for columns */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* same scheme as for the rows, on the column star forest */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the cached local index sets */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "MatCopy_IS"
407 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
408 {
409   Mat_IS         *a = (Mat_IS*)A->data,*b;
410   PetscBool      ismatis;
411   PetscErrorCode ierr;
412 
413   PetscFunctionBegin;
414   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
415   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
416   b = (Mat_IS*)B->data;
417   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
418   PetscFunctionReturn(0);
419 }
420 
421 #undef __FUNCT__
422 #define __FUNCT__ "MatMissingDiagonal_IS"
423 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
424 {
425   Vec               v;
426   const PetscScalar *array;
427   PetscInt          i,n;
428   PetscErrorCode    ierr;
429 
430   PetscFunctionBegin;
431   *missing = PETSC_FALSE;
432   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
433   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
434   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
435   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
436   for (i=0;i<n;i++) if (array[i] == 0.) break;
437   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
438   ierr = VecDestroy(&v);CHKERRQ(ierr);
439   if (i != n) *missing = PETSC_TRUE;
440   if (d) {
441     *d = -1;
442     if (*missing) {
443       PetscInt rstart;
444       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
445       *d = i+rstart;
446     }
447   }
448   PetscFunctionReturn(0);
449 }
450 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forest(s) connecting the global layout of B (roots: locally
   owned rows/columns) with the rows/columns of the local subdomain matrix
   matis->A (leaves), using the local-to-global mapping indices. Also allocates
   the rootdata/leafdata work arrays used by the preallocation and submatrix
   extraction routines. When the row and column mappings coincide, the column
   SF and its buffers simply alias the row ones (note: destruction code must
   account for this aliasing).
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all rows of the local matrix; roots: locally owned global rows */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* identical mappings: alias the row SF and its work arrays */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
484 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation, if the type provides one */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
532 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MATIS implementation of MatISSetPreallocation: the per-owned-row counts
   (diagonal + off-diagonal summed together) are broadcast through the star
   forest to the local subdomain matrices, clipped at the number of local
   columns, and used to preallocate the local matrix for the AIJ, BAIJ and
   SBAIJ formats in turn. Note the leafdata array is transformed IN PLACE
   between the calls; the statement order below is essential.
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total count per owned row = diagonal part + off-diagonal part */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with the local size/blocksize queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* convert to per-block-row block counts, in place (safe since i <= i*bs) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* subtracting the block-row index discounts blocks in the lower triangular
     part for the symmetric format -- NOTE(review): heuristic overestimate,
     confirm against the SBAIJ preallocation requirements */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
574 
575 #undef __FUNCT__
576 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
577 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
578 {
579   Mat_IS          *matis = (Mat_IS*)(A->data);
580   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
581   const PetscInt  *global_indices_r,*global_indices_c;
582   PetscInt        i,j,bs,rows,cols;
583   PetscInt        lrows,lcols;
584   PetscInt        local_rows,local_cols;
585   PetscMPIInt     nsubdomains;
586   PetscBool       isdense,issbaij;
587   PetscErrorCode  ierr;
588 
589   PetscFunctionBegin;
590   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
591   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
592   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
593   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
594   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
595   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
596   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
597   if (A->rmap->mapping != A->cmap->mapping) {
598     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
599   } else {
600     global_indices_c = global_indices_r;
601   }
602 
603   if (issbaij) {
604     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
605   }
606   /*
607      An SF reduce is needed to sum up properly on shared rows.
608      Note that generally preallocation is not exact, since it overestimates nonzeros
609   */
610   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
611     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
612   }
613   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
614   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
615   /* All processes need to compute entire row ownership */
616   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
617   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
618   for (i=0;i<nsubdomains;i++) {
619     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
620       row_ownership[j] = i;
621     }
622   }
623   ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
624 
625   /*
626      my_dnz and my_onz contains exact contribution to preallocation from each local mat
627      then, they will be summed up properly. This way, preallocation is always sufficient
628   */
629   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
630   /* preallocation as a MATAIJ */
631   if (isdense) { /* special case for dense local matrices */
632     for (i=0;i<local_rows;i++) {
633       PetscInt index_row = global_indices_r[i];
634       for (j=i;j<local_rows;j++) {
635         PetscInt owner = row_ownership[index_row];
636         PetscInt index_col = global_indices_c[j];
637         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
638           my_dnz[i] += 1;
639         } else { /* offdiag block */
640           my_onz[i] += 1;
641         }
642         /* same as before, interchanging rows and cols */
643         if (i != j) {
644           owner = row_ownership[index_col];
645           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
646             my_dnz[j] += 1;
647           } else {
648             my_onz[j] += 1;
649           }
650         }
651       }
652     }
653   } else if (matis->A->ops->getrowij) {
654     const PetscInt *ii,*jj,*jptr;
655     PetscBool      done;
656     ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
657     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
658     jptr = jj;
659     for (i=0;i<local_rows;i++) {
660       PetscInt index_row = global_indices_r[i];
661       for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
662         PetscInt owner = row_ownership[index_row];
663         PetscInt index_col = global_indices_c[*jptr];
664         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
665           my_dnz[i] += 1;
666         } else { /* offdiag block */
667           my_onz[i] += 1;
668         }
669         /* same as before, interchanging rows and cols */
670         if (issbaij && index_col != index_row) {
671           owner = row_ownership[index_col];
672           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
673             my_dnz[*jptr] += 1;
674           } else {
675             my_onz[*jptr] += 1;
676           }
677         }
678       }
679     }
680     ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
681     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
682   } else { /* loop over rows and use MatGetRow */
683     for (i=0;i<local_rows;i++) {
684       const PetscInt *cols;
685       PetscInt       ncols,index_row = global_indices_r[i];
686       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
687       for (j=0;j<ncols;j++) {
688         PetscInt owner = row_ownership[index_row];
689         PetscInt index_col = global_indices_c[cols[j]];
690         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
691           my_dnz[i] += 1;
692         } else { /* offdiag block */
693           my_onz[i] += 1;
694         }
695         /* same as before, interchanging rows and cols */
696         if (issbaij && index_col != index_row) {
697           owner = row_ownership[index_col];
698           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
699             my_dnz[cols[j]] += 1;
700           } else {
701             my_onz[cols[j]] += 1;
702           }
703         }
704       }
705       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
706     }
707   }
708   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
709   if (global_indices_c != global_indices_r) {
710     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
711   }
712   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
713 
714   /* Reduce my_dnz and my_onz */
715   if (maxreduce) {
716     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
717     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
718     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
719     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
720   } else {
721     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
722     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
723     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
724     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
725   }
726   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
727 
728   /* Resize preallocation if overestimated */
729   for (i=0;i<lrows;i++) {
730     dnz[i] = PetscMin(dnz[i],lcols);
731     onz[i] = PetscMin(onz[i],cols-lcols);
732   }
733   /* set preallocation */
734   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
735   for (i=0;i<lrows/bs;i++) {
736     dnz[i] = dnz[i*bs]/bs;
737     onz[i] = onz[i*bs]/bs;
738   }
739   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
740   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
741   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
742   if (issbaij) {
743     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
744   }
745   PetscFunctionReturn(0);
746 }
747 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assembles the globally unassembled MATIS mat into a parallel AIJ/BAIJ/SBAIJ
   matrix M by ADD_VALUES-inserting the entries of every subdomain matrix at
   their global locations. With MAT_REUSE_MATRIX, *M must already match the
   sizes and block size of mat. */
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* single subdomain: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if EVERY subdomain matrix is SBAIJ */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match the MATIS layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ only stores the upper triangle; convert to BAIJ so every entry is visited below */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      /* square case: one identity index array serves both rows and columns */
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense storage is column oriented: tell *M before the single big insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* AIJ local matrix: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* overlap assembly communication with the destruction of the (possibly converted) local matrix */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default row-oriented insertion mode */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
881 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, *newmat must be a valid matrix on the same comm, distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the type-specific implementation composed on mat */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
916 
917 #undef __FUNCT__
918 #define __FUNCT__ "MatDuplicate_IS"
919 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
920 {
921   PetscErrorCode ierr;
922   Mat_IS         *matis = (Mat_IS*)(mat->data);
923   PetscInt       bs,m,n,M,N;
924   Mat            B,localmat;
925 
926   PetscFunctionBegin;
927   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
928   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
929   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
930   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
931   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
932   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
933   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
934   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
935   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
936   *newmat = B;
937   PetscFunctionReturn(0);
938 }
939 
940 #undef __FUNCT__
941 #define __FUNCT__ "MatIsHermitian_IS"
942 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
943 {
944   PetscErrorCode ierr;
945   Mat_IS         *matis = (Mat_IS*)A->data;
946   PetscBool      local_sym;
947 
948   PetscFunctionBegin;
949   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
950   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "MatIsSymmetric_IS"
956 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
957 {
958   PetscErrorCode ierr;
959   Mat_IS         *matis = (Mat_IS*)A->data;
960   PetscBool      local_sym;
961 
962   PetscFunctionBegin;
963   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
964   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
965   PetscFunctionReturn(0);
966 }
967 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Destroys all data owned by a MATIS: the subdomain matrix, the scatters and
   work vectors used by MatMult, the cached index sets and star forests, and
   the composed type-specific methods. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* the column SF and its buffers are separate objects only when csf != sf;
     otherwise sf's destruction below covers it.  NOTE(review): presumably
     csf_rootdata/csf_leafdata alias the sf arrays when csf == sf -- confirm
     there is no leak in that case */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  /* reset the type name and remove the methods composed on this object */
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
998 
999 #undef __FUNCT__
1000 #define __FUNCT__ "MatMult_IS"
1001 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
1002 {
1003   PetscErrorCode ierr;
1004   Mat_IS         *is  = (Mat_IS*)A->data;
1005   PetscScalar    zero = 0.0;
1006 
1007   PetscFunctionBegin;
1008   /*  scatter the global vector x into the local work vector */
1009   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1010   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1011 
1012   /* multiply the local matrix */
1013   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
1014 
1015   /* scatter product back into global memory */
1016   ierr = VecSet(y,zero);CHKERRQ(ierr);
1017   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1018   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 #undef __FUNCT__
1023 #define __FUNCT__ "MatMultAdd_IS"
1024 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1025 {
1026   Vec            temp_vec;
1027   PetscErrorCode ierr;
1028 
1029   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
1030   if (v3 != v2) {
1031     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
1032     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1033   } else {
1034     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1035     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
1036     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1037     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1038     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1039   }
1040   PetscFunctionReturn(0);
1041 }
1042 
1043 #undef __FUNCT__
1044 #define __FUNCT__ "MatMultTranspose_IS"
1045 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
1046 {
1047   Mat_IS         *is = (Mat_IS*)A->data;
1048   PetscErrorCode ierr;
1049 
1050   PetscFunctionBegin;
1051   /*  scatter the global vector x into the local work vector */
1052   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1053   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1054 
1055   /* multiply the local matrix */
1056   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
1057 
1058   /* scatter product back into global vector */
1059   ierr = VecSet(x,0);CHKERRQ(ierr);
1060   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1061   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1062   PetscFunctionReturn(0);
1063 }
1064 
1065 #undef __FUNCT__
1066 #define __FUNCT__ "MatMultTransposeAdd_IS"
1067 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1068 {
1069   Vec            temp_vec;
1070   PetscErrorCode ierr;
1071 
1072   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
1073   if (v3 != v2) {
1074     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
1075     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1076   } else {
1077     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1078     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
1079     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1080     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1081     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1082   }
1083   PetscFunctionReturn(0);
1084 }
1085 
1086 #undef __FUNCT__
1087 #define __FUNCT__ "MatView_IS"
1088 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
1089 {
1090   Mat_IS         *a = (Mat_IS*)A->data;
1091   PetscErrorCode ierr;
1092   PetscViewer    sviewer;
1093 
1094   PetscFunctionBegin;
1095   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1096   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
1097   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1098   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* (Re)builds all MATIS internals from the row/column local-to-global mappings:
   the sequential subdomain matrix is->A, the global-to-local scatters used by
   MatMult, the local work vectors, and the interface multiplicity counter. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: sequential, sized by the number of local indices */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix is configurable from the options database under the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      /* distinct column map: build a separate scatter for the column space */
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same mapping: share a single scatter for rows and columns */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): holds, for each local index, the number
       of subdomains sharing it -- obtained by scattering ones to the global
       vector with ADD_VALUES and gathering the sums back */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1181 
1182 #undef __FUNCT__
1183 #define __FUNCT__ "MatSetValues_IS"
1184 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1185 {
1186   Mat_IS         *is = (Mat_IS*)mat->data;
1187   PetscErrorCode ierr;
1188 #if defined(PETSC_USE_DEBUG)
1189   PetscInt       i,zm,zn;
1190 #endif
1191   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1192 
1193   PetscFunctionBegin;
1194 #if defined(PETSC_USE_DEBUG)
1195   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1196   /* count negative indices */
1197   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1198   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1199 #endif
1200   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1201   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1202 #if defined(PETSC_USE_DEBUG)
1203   /* count negative indices (should be the same as before) */
1204   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1205   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1206   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1207   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1208 #endif
1209   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1210   PetscFunctionReturn(0);
1211 }
1212 
1213 #undef __FUNCT__
1214 #define __FUNCT__ "MatSetValuesBlocked_IS"
1215 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1216 {
1217   Mat_IS         *is = (Mat_IS*)mat->data;
1218   PetscErrorCode ierr;
1219 #if defined(PETSC_USE_DEBUG)
1220   PetscInt       i,zm,zn;
1221 #endif
1222   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1223 
1224   PetscFunctionBegin;
1225 #if defined(PETSC_USE_DEBUG)
1226   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1227   /* count negative indices */
1228   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1229   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1230 #endif
1231   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1232   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1233 #if defined(PETSC_USE_DEBUG)
1234   /* count negative indices (should be the same as before) */
1235   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1236   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1237   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1238   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1239 #endif
1240   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1241   PetscFunctionReturn(0);
1242 }
1243 
1244 #undef __FUNCT__
1245 #define __FUNCT__ "MatSetValuesLocal_IS"
1246 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1247 {
1248   PetscErrorCode ierr;
1249   Mat_IS         *is = (Mat_IS*)A->data;
1250 
1251   PetscFunctionBegin;
1252   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1253   PetscFunctionReturn(0);
1254 }
1255 
1256 #undef __FUNCT__
1257 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1258 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1259 {
1260   PetscErrorCode ierr;
1261   Mat_IS         *is = (Mat_IS*)A->data;
1262 
1263   PetscFunctionBegin;
1264   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1265   PetscFunctionReturn(0);
1266 }
1267 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zeros the given LOCAL rows (and optionally columns) of the subdomain matrix.
   For a nonzero diag, each zeroed diagonal entry is set to diag divided by the
   interface multiplicity stored in is->counter, so that the contributions
   summed across subdomains add up to diag. */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    /* no boundary rows on this subdomain */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows (and columns) with a zero diagonal; the scaled diagonal is set below */
    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.) {
      /* is->counter holds the number of subdomains sharing each local index
         (built in MatSetLocalToGlobalMapping_IS) */
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1300 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/* Common driver for MatZeroRows/MatZeroRowsColumns on MATIS.
   rows[] holds GLOBAL indices: they are first mapped to locally owned rows,
   the right-hand side is optionally fixed, and the zeroed set is then
   broadcast through the star forest to find the corresponding LOCAL rows
   of every sharing subdomain. */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* features that identify rows with columns require congruent row/column layouts */
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed: b[row] = diag * x[row] on zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark the owned rows to be zeroed, then broadcast the marks to every sharing subdomain */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the marked leaves into the list of local rows to zero */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1350 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/* MatZeroRows implementation for MATIS: delegates to the common
   rows/rows-and-columns driver with columns = PETSC_FALSE. */
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1361 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_IS"
/* MatZeroRowsColumns implementation for MATIS: delegates to the common
   rows/rows-and-columns driver with columns = PETSC_TRUE. */
static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1372 
1373 #undef __FUNCT__
1374 #define __FUNCT__ "MatAssemblyBegin_IS"
1375 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1376 {
1377   Mat_IS         *is = (Mat_IS*)A->data;
1378   PetscErrorCode ierr;
1379 
1380   PetscFunctionBegin;
1381   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1382   PetscFunctionReturn(0);
1383 }
1384 
1385 #undef __FUNCT__
1386 #define __FUNCT__ "MatAssemblyEnd_IS"
1387 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1388 {
1389   Mat_IS         *is = (Mat_IS*)A->data;
1390   PetscErrorCode ierr;
1391 
1392   PetscFunctionBegin;
1393   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1394   PetscFunctionReturn(0);
1395 }
1396 
1397 #undef __FUNCT__
1398 #define __FUNCT__ "MatISGetLocalMat_IS"
1399 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1400 {
1401   Mat_IS *is = (Mat_IS*)mat->data;
1402 
1403   PetscFunctionBegin;
1404   *local = is->A;
1405   PetscFunctionReturn(0);
1406 }
1407 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The returned matrix is a borrowed reference: no new reference is taken on
  it, so do not destroy it; use MatISSetLocalMat() to replace it instead.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch through the composed method so this errors cleanly on non-MATIS matrices */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1438 
1439 #undef __FUNCT__
1440 #define __FUNCT__ "MatISSetLocalMat_IS"
1441 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1442 {
1443   Mat_IS         *is = (Mat_IS*)mat->data;
1444   PetscInt       nrows,ncols,orows,ocols;
1445   PetscErrorCode ierr;
1446 
1447   PetscFunctionBegin;
1448   if (is->A) {
1449     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1450     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1451     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
1452   }
1453   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1454   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1455   is->A = local;
1456   PetscFunctionReturn(0);
1457 }
1458 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    A reference is taken on local (the caller keeps ownership of its own
  reference), and the previously stored local matrix is destroyed. If a
  local matrix is already present, local must have the same sizes.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch through the composed method so this errors cleanly on non-MATIS matrices */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1488 
1489 #undef __FUNCT__
1490 #define __FUNCT__ "MatZeroEntries_IS"
1491 static PetscErrorCode MatZeroEntries_IS(Mat A)
1492 {
1493   Mat_IS         *a = (Mat_IS*)A->data;
1494   PetscErrorCode ierr;
1495 
1496   PetscFunctionBegin;
1497   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 #undef __FUNCT__
1502 #define __FUNCT__ "MatScale_IS"
1503 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1504 {
1505   Mat_IS         *is = (Mat_IS*)A->data;
1506   PetscErrorCode ierr;
1507 
1508   PetscFunctionBegin;
1509   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1510   PetscFunctionReturn(0);
1511 }
1512 
1513 #undef __FUNCT__
1514 #define __FUNCT__ "MatGetDiagonal_IS"
1515 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1516 {
1517   Mat_IS         *is = (Mat_IS*)A->data;
1518   PetscErrorCode ierr;
1519 
1520   PetscFunctionBegin;
1521   /* get diagonal of the local matrix */
1522   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1523 
1524   /* scatter diagonal back into global vector */
1525   ierr = VecSet(v,0);CHKERRQ(ierr);
1526   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1527   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1528   PetscFunctionReturn(0);
1529 }
1530 
1531 #undef __FUNCT__
1532 #define __FUNCT__ "MatSetOption_IS"
1533 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1534 {
1535   Mat_IS         *a = (Mat_IS*)A->data;
1536   PetscErrorCode ierr;
1537 
1538   PetscFunctionBegin;
1539   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1540   PetscFunctionReturn(0);
1541 }
1542 
1543 #undef __FUNCT__
1544 #define __FUNCT__ "MatAXPY_IS"
1545 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1546 {
1547   Mat_IS         *y = (Mat_IS*)Y->data;
1548   Mat_IS         *x;
1549 #if defined(PETSC_USE_DEBUG)
1550   PetscBool      ismatis;
1551 #endif
1552   PetscErrorCode ierr;
1553 
1554   PetscFunctionBegin;
1555 #if defined(PETSC_USE_DEBUG)
1556   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1557   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1558 #endif
1559   x = (Mat_IS*)X->data;
1560   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1561   PetscFunctionReturn(0);
1562 }
1563 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Create a MATIS view (*submat) of A restricted to the local index sets row/col:
   the view shares A's local matrix and uses local-to-global maps that send
   [0,nrl) to the entries of row (resp. col), so MatSetValuesLocal() on *submat
   addresses the corresponding portion of A. Only the local set/assembly ops are
   kept on the returned matrix; everything else is zeroed out. */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* NOTE(review): rg (and cg below) are fetched and restored but never read —
     presumably leftover code; confirm before removing */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the map up to the full local size nrg: nrg (out of range) in debug
     builds so stray accesses are caught, -1 otherwise — presumably so that
     unmapped entries are ignored; confirm against ISLocalToGlobalMapping docs */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  /* idxs ownership moves to the IS (PETSC_OWN_POINTER): do not free it here */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
    /* same padding scheme as the row map above */
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* same index set and same row/column maps: share the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix: same global sizes as A, sharing A's local matrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  /* mark the new matrix as a local reference of A */
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: the view only supports local value insertion,
     assembly and destruction */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}
1652 
1653 #undef __FUNCT__
1654 #define __FUNCT__ "MatCreateIS"
1655 /*@
1656     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1657        process but not across processes.
1658 
1659    Input Parameters:
1660 +     comm    - MPI communicator that will share the matrix
1661 .     bs      - block size of the matrix
1662 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1663 .     rmap    - local to global map for rows
1664 -     cmap    - local to global map for cols
1665 
1666    Output Parameter:
1667 .    A - the resulting matrix
1668 
1669    Level: advanced
1670 
1671    Notes: See MATIS for more details.
1672           m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
1673           used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
1674           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1675 
1676 .seealso: MATIS, MatSetLocalToGlobalMapping()
1677 @*/
1678 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1679 {
1680   PetscErrorCode ierr;
1681 
1682   PetscFunctionBegin;
1683   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings");
1684   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1685   if (bs > 0) {
1686     ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1687   }
1688   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1689   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1690   if (rmap && cmap) {
1691     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1692   } else if (!rmap) {
1693     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1694   } else {
1695     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1696   }
1697   PetscFunctionReturn(0);
1698 }
1699 
1700 /*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1702    This stores the matrices in globally unassembled form. Each processor
1703    assembles only its local Neumann problem and the parallel matrix vector
1704    product is handled "implicitly".
1705 
1706    Operations Provided:
1707 +  MatMult()
1708 .  MatMultAdd()
1709 .  MatMultTranspose()
1710 .  MatMultTransposeAdd()
1711 .  MatZeroEntries()
1712 .  MatSetOption()
1713 .  MatZeroRows()
1714 .  MatSetValues()
1715 .  MatSetValuesBlocked()
1716 .  MatSetValuesLocal()
1717 .  MatSetValuesBlockedLocal()
1718 .  MatScale()
1719 .  MatGetDiagonal()
1720 .  MatMissingDiagonal()
1721 .  MatDuplicate()
1722 .  MatCopy()
1723 .  MatAXPY()
1724 .  MatGetSubMatrix()
1725 .  MatGetLocalSubMatrix()
1726 .  MatTranspose()
1727 -  MatSetLocalToGlobalMapping()
1728 
1729    Options Database Keys:
1730 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1731 
          Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1733 
1734           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1735 
1736           You can do matrix preallocation on the local matrix after you obtain it with
1737           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1738 
1739   Level: advanced
1740 
1741 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1742 
1743 M*/
1744 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS matrix type: allocates the Mat_IS data structure,
   installs the full operations table, and composes the MATIS-specific methods
   (get/set local matrix, conversion to MPIXAIJ, preallocation). */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from an all-zero table so unsupported operations fail cleanly */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;
  A->ops->getinfo                 = MatGetInfo_IS;

  /* special MATIS functions, reachable via PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1798