xref: /petsc/src/mat/impls/is/matis.c (revision 3927de2ed0f0aec79c45dc8add25bfe718efd5fa)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the star forest (SF) connecting the rows of
   the local unassembled matrix (leaves) to the globally owned rows of B
   (roots), and allocates matching root/leaf work arrays.

   The SF is later used to broadcast/reduce per-row integer data (e.g.
   preallocation counts) between the global layout and the subdomain matrices.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all rows of the local matrix; roots: rows of B owned by this process */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global map provides the global (root) index of every leaf */
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* persistent work arrays for PetscSFBcast/PetscSFReduce; freed in MatDestroy_IS */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
/*@
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation; a no-op if B is not MATIS */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatISSetPreallocation_IS"
89 PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
90 {
91   Mat_IS         *matis = (Mat_IS*)(B->data);
92   PetscInt       bs,i,nlocalcols;
93   PetscErrorCode ierr;
94 
95   PetscFunctionBegin;
96   if (!matis->A) {
97     SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
98   }
99   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
100     ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
101   }
102   if (!d_nnz) {
103     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
104   } else {
105     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
106   }
107   if (!o_nnz) {
108     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
109   } else {
110     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
111   }
112   ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
113   ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
114   ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
115   ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
116   for (i=0;i<matis->sf_nleaves;i++) {
117     matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
118   }
119   ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
120   for (i=0;i<matis->sf_nleaves/bs;i++) {
121     matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
122   }
123   ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
124   for (i=0;i<matis->sf_nleaves/bs;i++) {
125     matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
126   }
127   ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
133 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
134 {
135   Mat_IS          *matis = (Mat_IS*)(A->data);
136   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
137   const PetscInt* global_indices;
138   PetscInt        i,j,bs,rows,cols;
139   PetscInt        lrows,lcols;
140   PetscInt        local_rows,local_cols;
141   PetscMPIInt     nsubdomains;
142   PetscBool       isdense,issbaij;
143   PetscErrorCode  ierr;
144 
145   PetscFunctionBegin;
146   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
147   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
150   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
151   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
152   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
153   if (issbaij) {
154     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
155   }
156   /*
157      An SF reduce is needed to sum up properly on shared interface dofs.
158      Note that generally preallocation is not exact, since it overestimates nonzeros
159   */
160   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
161     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
162   }
163   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
164   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
165   /* All processes need to compute entire row ownership */
166   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
167   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
168   for (i=0;i<nsubdomains;i++) {
169     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
170       row_ownership[j]=i;
171     }
172   }
173 
174   /*
175      my_dnz and my_onz contains exact contribution to preallocation from each local mat
176      then, they will be summed up properly. This way, preallocation is always sufficient
177   */
178   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
179   /* preallocation as a MATAIJ */
180   if (isdense) { /* special case for dense local matrices */
181     for (i=0;i<local_rows;i++) {
182       PetscInt index_row = global_indices[i];
183       for (j=i;j<local_rows;j++) {
184         PetscInt owner = row_ownership[index_row];
185         PetscInt index_col = global_indices[j];
186         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
187           my_dnz[i] += 1;
188         } else { /* offdiag block */
189           my_onz[i] += 1;
190         }
191         /* same as before, interchanging rows and cols */
192         if (i != j) {
193           owner = row_ownership[index_col];
194           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
195             my_dnz[j] += 1;
196           } else {
197             my_onz[j] += 1;
198           }
199         }
200       }
201     }
202   } else {
203     for (i=0;i<local_rows;i++) {
204       const PetscInt *cols;
205       PetscInt       ncols,index_row = global_indices[i];
206       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
207       for (j=0;j<ncols;j++) {
208         PetscInt owner = row_ownership[index_row];
209         PetscInt index_col = global_indices[cols[j]];
210         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
211           my_dnz[i] += 1;
212         } else { /* offdiag block */
213           my_onz[i] += 1;
214         }
215         /* same as before, interchanging rows and cols */
216         if (issbaij) {
217           owner = row_ownership[index_col];
218           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
219             my_dnz[j] += 1;
220           } else {
221             my_onz[j] += 1;
222           }
223         }
224       }
225       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
226     }
227   }
228   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
229   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
230   if (maxreduce) {
231     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
232     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
233     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
234     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
235   } else {
236     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
237     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
238     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
239     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
240   }
241   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
242 
243   /* Resize preallocation if overestimated */
244   for (i=0;i<lrows;i++) {
245     dnz[i] = PetscMin(dnz[i],lcols);
246     onz[i] = PetscMin(onz[i],cols-lcols);
247   }
248   /* set preallocation */
249   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
250   for (i=0;i<lrows/bs;i++) {
251     dnz[i] = dnz[i*bs]/bs;
252     onz[i] = onz[i*bs]/bs;
253   }
254   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
255   for (i=0;i<lrows/bs;i++) {
256     dnz[i] = dnz[i]-i;
257   }
258   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
259   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
260   if (issbaij) {
261     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
262   }
263   PetscFunctionReturn(0);
264 }
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
268 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
269 {
270   Mat_IS                 *matis = (Mat_IS*)(mat->data);
271   /* info on mat */
272   /* ISLocalToGlobalMapping rmapping,cmapping; */
273   PetscInt               bs,rows,cols;
274   PetscInt               local_rows,local_cols;
275   PetscBool              isdense,issbaij;
276   PetscMPIInt            nsubdomains;
277   /* values insertion */
278   PetscScalar            *array;
279   PetscInt               *local_indices,*global_indices;
280   /* work */
281   PetscInt               i,j,index_row;
282   PetscErrorCode         ierr;
283 
284   PetscFunctionBegin;
285   /* MISSING CHECKS
286     - rectangular case not covered (it is not allowed by MATIS)
287   */
288   /* get info from mat */
289   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
290   if (nsubdomains == 1) {
291     if (reuse == MAT_INITIAL_MATRIX) {
292       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
293     } else {
294       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
295     }
296     PetscFunctionReturn(0);
297   }
298   /* ierr = MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping);CHKERRQ(ierr); */
299   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
300   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
301   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
302   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
303   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
304 
305   /* work */
306   ierr = PetscMalloc1(local_rows,&local_indices);CHKERRQ(ierr);
307   for (i=0;i<local_rows;i++) local_indices[i]=i;
308   /* map indices of local mat to global values */
309   ierr = PetscMalloc1(PetscMax(local_cols,local_rows),&global_indices);CHKERRQ(ierr);
310   /* ierr = ISLocalToGlobalMappingApply(rmapping,local_rows,local_indices,global_indices);CHKERRQ(ierr); */
311   ierr = ISLocalToGlobalMappingApply(matis->mapping,local_rows,local_indices,global_indices);CHKERRQ(ierr);
312 
313   if (issbaij) {
314     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
315   }
316 
317   if (reuse == MAT_INITIAL_MATRIX) {
318     MatType     new_mat_type;
319     PetscBool   issbaij_red;
320 
321     /* determining new matrix type */
322     ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
323     if (issbaij_red) {
324       new_mat_type = MATSBAIJ;
325     } else {
326       if (bs>1) {
327         new_mat_type = MATBAIJ;
328       } else {
329         new_mat_type = MATAIJ;
330       }
331     }
332 
333     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
334     ierr = MatSetSizes(*M,PETSC_DECIDE,PETSC_DECIDE,rows,cols);CHKERRQ(ierr);
335     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
336     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
337     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
338   } else {
339     PetscInt mbs,mrows,mcols;
340     /* some checks */
341     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
342     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
343     if (mrows != rows) {
344       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
345     }
346     if (mrows != rows) {
347       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
348     }
349     if (mbs != bs) {
350       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
351     }
352     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
353   }
354   /* set local to global mappings */
355   /* ierr = MatSetLocalToGlobalMapping(*M,rmapping,cmapping);CHKERRQ(ierr); */
356   /* Set values */
357   if (isdense) { /* special case for dense local matrices */
358     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
359     ierr = MatDenseGetArray(matis->A,&array);CHKERRQ(ierr);
360     ierr = MatSetValues(*M,local_rows,global_indices,local_cols,global_indices,array,ADD_VALUES);CHKERRQ(ierr);
361     ierr = MatDenseRestoreArray(matis->A,&array);CHKERRQ(ierr);
362     ierr = PetscFree(local_indices);CHKERRQ(ierr);
363     ierr = PetscFree(global_indices);CHKERRQ(ierr);
364   } else { /* very basic values insertion for all other matrix types */
365     ierr = PetscFree(local_indices);CHKERRQ(ierr);
366     for (i=0;i<local_rows;i++) {
367       ierr = MatGetRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
368       /* ierr = MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES);CHKERRQ(ierr); */
369       ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices);CHKERRQ(ierr);
370       ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&index_row);CHKERRQ(ierr);
371       ierr = MatSetValues(*M,1,&index_row,j,global_indices,array,ADD_VALUES);CHKERRQ(ierr);
372       ierr = MatRestoreRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
373     }
374     ierr = PetscFree(global_indices);CHKERRQ(ierr);
375   }
376   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
377   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
378   if (isdense) {
379     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
380   }
381   if (issbaij) {
382     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
383   }
384   PetscFunctionReturn(0);
385 }
386 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: With MAT_REUSE_MATRIX, newmat must have the same sizes and block size as mat.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* when reusing, newmat must be a valid matrix living on the same communicator */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
420 
421 #undef __FUNCT__
422 #define __FUNCT__ "MatDuplicate_IS"
423 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
424 {
425   PetscErrorCode ierr;
426   Mat_IS         *matis = (Mat_IS*)(mat->data);
427   PetscInt       bs,m,n,M,N;
428   Mat            B,localmat;
429 
430   PetscFunctionBegin;
431   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
432   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
433   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
434   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
435   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
436   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
437   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
438   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
439   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
440   *newmat = B;
441   PetscFunctionReturn(0);
442 }
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatIsHermitian_IS"
446 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
447 {
448   PetscErrorCode ierr;
449   Mat_IS         *matis = (Mat_IS*)A->data;
450   PetscBool      local_sym;
451 
452   PetscFunctionBegin;
453   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
454   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
455   PetscFunctionReturn(0);
456 }
457 
458 #undef __FUNCT__
459 #define __FUNCT__ "MatIsSymmetric_IS"
460 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
461 {
462   PetscErrorCode ierr;
463   Mat_IS         *matis = (Mat_IS*)A->data;
464   PetscBool      local_sym;
465 
466   PetscFunctionBegin;
467   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
468   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
469   PetscFunctionReturn(0);
470 }
471 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/*
   MatDestroy_IS - Destroys all MATIS-owned objects (local matrix, scatter
   context, work vectors, mapping, SF and its work arrays) and removes the
   composed type-specific methods.
*/
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  /* sf_rootdata/sf_leafdata were allocated together in MatISComputeSF_Private */
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* clear the function pointers composed on the object at creation time */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatMult_IS"
498 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
499 {
500   PetscErrorCode ierr;
501   Mat_IS         *is  = (Mat_IS*)A->data;
502   PetscScalar    zero = 0.0;
503 
504   PetscFunctionBegin;
505   /*  scatter the global vector x into the local work vector */
506   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
507   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
508 
509   /* multiply the local matrix */
510   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
511 
512   /* scatter product back into global memory */
513   ierr = VecSet(y,zero);CHKERRQ(ierr);
514   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
515   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
516   PetscFunctionReturn(0);
517 }
518 
519 #undef __FUNCT__
520 #define __FUNCT__ "MatMultAdd_IS"
521 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
522 {
523   Vec            temp_vec;
524   PetscErrorCode ierr;
525 
526   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
527   if (v3 != v2) {
528     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
529     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
530   } else {
531     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
532     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
533     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
534     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
535     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
536   }
537   PetscFunctionReturn(0);
538 }
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "MatMultTranspose_IS"
542 PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
543 {
544   Mat_IS         *is = (Mat_IS*)A->data;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin; /*  y = A' * x */
548   /*  scatter the global vector x into the local work vector */
549   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
550   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
551 
552   /* multiply the local matrix */
553   ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);
554 
555   /* scatter product back into global vector */
556   ierr = VecSet(y,0);CHKERRQ(ierr);
557   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
558   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
559   PetscFunctionReturn(0);
560 }
561 
562 #undef __FUNCT__
563 #define __FUNCT__ "MatMultTransposeAdd_IS"
564 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
565 {
566   Vec            temp_vec;
567   PetscErrorCode ierr;
568 
569   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
570   if (v3 != v2) {
571     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
572     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
573   } else {
574     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
575     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
576     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
577     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
578     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
579   }
580   PetscFunctionReturn(0);
581 }
582 
#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/*
   MatView_IS - Views a MATIS matrix by viewing each process' local matrix
   through a per-process (singleton) viewer.
*/
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  /* each process views its local matrix on its own sub-viewer */
  ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
598 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Installs the (row == column) local-to-global
   mapping and builds everything that depends on it: the sequential local
   matrix, the local work vectors x/y, and the global-to-local scatter context
   used by the MatMult routines.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currenly destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  /* take a reference to the mapping before storing it */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A (BAIJ when the mapping has block size > 1) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  /* options for the local matrix carry the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter: local position i receives global dof mapping(i) */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
663 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/*
   MatSetValues_IS - Inserts values given in GLOBAL numbering: the indices are
   converted to local numbering via ISG2LMapApply (a macro presumably defined
   earlier in this file -- see the matching #undef below) and forwarded to the
   local matrix.

   NOTE(review): relies on fixed-size stack buffers, so at most 2048 row and
   2048 column indices can be passed per call; this is only checked in debug
   builds.
*/
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  /* translate global indices to local ones, then insert into the local matrix */
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
681 
682 #undef ISG2LMapSetUp
683 #undef ISG2LMapApply
684 
685 #undef __FUNCT__
686 #define __FUNCT__ "MatSetValuesLocal_IS"
687 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
688 {
689   PetscErrorCode ierr;
690   Mat_IS         *is = (Mat_IS*)A->data;
691 
692   PetscFunctionBegin;
693   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
694   PetscFunctionReturn(0);
695 }
696 
697 #undef __FUNCT__
698 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
699 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
700 {
701   PetscErrorCode ierr;
702   Mat_IS         *is = (Mat_IS*)A->data;
703 
704   PetscFunctionBegin;
705   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
706   PetscFunctionReturn(0);
707 }
708 
709 #undef __FUNCT__
710 #define __FUNCT__ "MatZeroRows_IS"
711 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
712 {
713   Mat_IS         *is = (Mat_IS*)A->data;
714   PetscInt       n_l = 0, *rows_l = NULL;
715   PetscErrorCode ierr;
716 
717   PetscFunctionBegin;
718   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
719   if (n) {
720     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
721     ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
722   }
723   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
724   ierr = PetscFree(rows_l);CHKERRQ(ierr);
725   PetscFunctionReturn(0);
726 }
727 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeroes the given (locally numbered) rows of the local
   matrix and places diag on their diagonal.

   A dof shared by several subdomains receives one diagonal contribution per
   subdomain when the global operator is applied, so each local diagonal is
   set to diag divided by the dof multiplicity computed below; presumably the
   implicitly assembled diagonal then sums to diag -- confirm against MatMult_IS.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    /* reverse add-scatter of all-ones counts how many subdomains share each
       global dof; the forward scatter brings that count back to each leaf */
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows locally, then rescale the diagonal by the dof multiplicity */
    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
771 
772 #undef __FUNCT__
773 #define __FUNCT__ "MatAssemblyBegin_IS"
774 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
775 {
776   Mat_IS         *is = (Mat_IS*)A->data;
777   PetscErrorCode ierr;
778 
779   PetscFunctionBegin;
780   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
781   PetscFunctionReturn(0);
782 }
783 
784 #undef __FUNCT__
785 #define __FUNCT__ "MatAssemblyEnd_IS"
786 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
787 {
788   Mat_IS         *is = (Mat_IS*)A->data;
789   PetscErrorCode ierr;
790 
791   PetscFunctionBegin;
792   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
793   PetscFunctionReturn(0);
794 }
795 
796 #undef __FUNCT__
797 #define __FUNCT__ "MatISGetLocalMat_IS"
798 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
799 {
800   Mat_IS *is = (Mat_IS*)mat->data;
801 
802   PetscFunctionBegin;
803   *local = is->A;
804   PetscFunctionReturn(0);
805 }
806 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix, usually of type MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The returned matrix is not reference-counted on behalf of the caller;
  do not destroy it.

.seealso: MATIS, MatISSetLocalMat()
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "MatISSetLocalMat_IS"
840 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
841 {
842   Mat_IS         *is = (Mat_IS*)mat->data;
843   PetscInt       nrows,ncols,orows,ocols;
844   PetscErrorCode ierr;
845 
846   PetscFunctionBegin;
847   if (is->A) {
848     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
849     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
850     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
851   }
852   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
853   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
854   is->A = local;
855   PetscFunctionReturn(0);
856 }
857 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix, usually of type MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The matrix object takes its own reference on local; the caller keeps
  ownership of its reference.

.seealso: MATIS, MatISGetLocalMat()
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
887 
888 #undef __FUNCT__
889 #define __FUNCT__ "MatZeroEntries_IS"
890 PetscErrorCode MatZeroEntries_IS(Mat A)
891 {
892   Mat_IS         *a = (Mat_IS*)A->data;
893   PetscErrorCode ierr;
894 
895   PetscFunctionBegin;
896   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "MatScale_IS"
902 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
903 {
904   Mat_IS         *is = (Mat_IS*)A->data;
905   PetscErrorCode ierr;
906 
907   PetscFunctionBegin;
908   ierr = MatScale(is->A,a);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "MatGetDiagonal_IS"
914 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
915 {
916   Mat_IS         *is = (Mat_IS*)A->data;
917   PetscErrorCode ierr;
918 
919   PetscFunctionBegin;
920   /* get diagonal of the local matrix */
921   ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);
922 
923   /* scatter diagonal back into global vector */
924   ierr = VecSet(v,0);CHKERRQ(ierr);
925   ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
926   ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "MatSetOption_IS"
932 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
933 {
934   Mat_IS         *a = (Mat_IS*)A->data;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm - MPI communicator that will share the matrix
.     bs - local and global block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
-     map - mapping that defines the global number for each local number

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
          plus the ghost points to global indices.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* same mapping is used for rows and columns */
  ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
979 
980 /*MC
981    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
982    This stores the matrices in globally unassembled form. Each processor
983    assembles only its local Neumann problem and the parallel matrix vector
984    product is handled "implicitly".
985 
986    Operations Provided:
987 +  MatMult()
988 .  MatMultAdd()
989 .  MatMultTranspose()
990 .  MatMultTransposeAdd()
991 .  MatZeroEntries()
992 .  MatSetOption()
993 .  MatZeroRows()
994 .  MatZeroRowsLocal()
995 .  MatSetValues()
996 .  MatSetValuesLocal()
997 .  MatScale()
998 .  MatGetDiagonal()
999 -  MatSetLocalToGlobalMapping()
1000 
1001    Options Database Keys:
1002 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1003 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1005 
1006           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1007 
1008           You can do matrix preallocation on the local matrix after you obtain it with
1009           MatISGetLocalMat()
1010 
1011   Level: advanced
1012 
1013 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC
1014 
1015 M*/
1016 
1017 #undef __FUNCT__
1018 #define __FUNCT__ "MatCreate_IS"
1019 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1020 {
1021   PetscErrorCode ierr;
1022   Mat_IS         *b;
1023 
1024   PetscFunctionBegin;
1025   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1026   A->data = (void*)b;
1027   ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1028 
1029   A->ops->mult                    = MatMult_IS;
1030   A->ops->multadd                 = MatMultAdd_IS;
1031   A->ops->multtranspose           = MatMultTranspose_IS;
1032   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1033   A->ops->destroy                 = MatDestroy_IS;
1034   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1035   A->ops->setvalues               = MatSetValues_IS;
1036   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1037   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1038   A->ops->zerorows                = MatZeroRows_IS;
1039   A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
1040   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1041   A->ops->assemblyend             = MatAssemblyEnd_IS;
1042   A->ops->view                    = MatView_IS;
1043   A->ops->zeroentries             = MatZeroEntries_IS;
1044   A->ops->scale                   = MatScale_IS;
1045   A->ops->getdiagonal             = MatGetDiagonal_IS;
1046   A->ops->setoption               = MatSetOption_IS;
1047   A->ops->ishermitian             = MatIsHermitian_IS;
1048   A->ops->issymmetric             = MatIsSymmetric_IS;
1049   A->ops->duplicate               = MatDuplicate_IS;
1050 
1051   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1052   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1053 
1054   /* zeros pointer */
1055   b->A           = 0;
1056   b->ctx         = 0;
1057   b->x           = 0;
1058   b->y           = 0;
1059   b->mapping     = 0;
1060   b->sf          = 0;
1061   b->sf_rootdata = 0;
1062   b->sf_leafdata = 0;
1063 
1064   /* special MATIS functions */
1065   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
1066   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
1067   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
1068   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
1069   ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
1070   PetscFunctionReturn(0);
1071 }
1072