xref: /petsc/src/mat/impls/is/matis.c (revision ecf5a87373d776f071f7480159d2011bfc64ef89)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatISComputeSF_Private"
21 static PetscErrorCode MatISComputeSF_Private(Mat B)
22 {
23   Mat_IS         *matis = (Mat_IS*)(B->data);
24   const PetscInt *gidxs;
25   PetscErrorCode ierr;
26 
27   PetscFunctionBegin;
28   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
29   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
30   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
31   ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
32   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
33   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
34   ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
35   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
41 /*@
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* Dispatch to the type-specific implementation composed on B
     (PetscTryMethod: silently a no-op if B is not a MATIS) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* Implementation of MatISSetPreallocation for MATIS: sums per-owned-row
   d+o nonzero estimates at the roots, broadcasts them to the rows of the
   local matrix via the SF, and preallocates the local matrix for the
   AIJ, BAIJ and SBAIJ formats (only the one matching matis->A takes effect). */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total (diagonal + off-diagonal) nonzero estimate for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with local queries on the local matrix */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row of the local matrix cannot have more nonzeros than local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress leafdata in place to per-block-row counts for BAIJ:
     take the estimate of the first row of each block and divide by bs */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): subtracting the block-row index presumably discounts the
     strictly-lower-triangular blocks not stored by SBAIJ — confirm */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
133 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
134 {
135   Mat_IS          *matis = (Mat_IS*)(A->data);
136   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
137   const PetscInt  *global_indices_r,*global_indices_c;
138   PetscInt        i,j,bs,rows,cols;
139   PetscInt        lrows,lcols;
140   PetscInt        local_rows,local_cols;
141   PetscMPIInt     nsubdomains;
142   PetscBool       isdense,issbaij;
143   PetscErrorCode  ierr;
144 
145   PetscFunctionBegin;
146   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
147   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
150   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
151   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
152   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
153   if (A->rmap->mapping != A->cmap->mapping) {
154     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
155   } else {
156     global_indices_c = global_indices_r;
157   }
158 
159   if (issbaij) {
160     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
161   }
162   /*
163      An SF reduce is needed to sum up properly on shared rows.
164      Note that generally preallocation is not exact, since it overestimates nonzeros
165   */
166   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
167     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
168   }
169   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
170   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
171   /* All processes need to compute entire row ownership */
172   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
173   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
174   for (i=0;i<nsubdomains;i++) {
175     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
176       row_ownership[j] = i;
177     }
178   }
179 
180   /*
181      my_dnz and my_onz contains exact contribution to preallocation from each local mat
182      then, they will be summed up properly. This way, preallocation is always sufficient
183   */
184   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
185   /* preallocation as a MATAIJ */
186   if (isdense) { /* special case for dense local matrices */
187     for (i=0;i<local_rows;i++) {
188       PetscInt index_row = global_indices_r[i];
189       for (j=i;j<local_rows;j++) {
190         PetscInt owner = row_ownership[index_row];
191         PetscInt index_col = global_indices_c[j];
192         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
193           my_dnz[i] += 1;
194         } else { /* offdiag block */
195           my_onz[i] += 1;
196         }
197         /* same as before, interchanging rows and cols */
198         if (i != j) {
199           owner = row_ownership[index_col];
200           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
201             my_dnz[j] += 1;
202           } else {
203             my_onz[j] += 1;
204           }
205         }
206       }
207     }
208   } else { /* TODO: this could be optimized using MatGetRowIJ */
209     for (i=0;i<local_rows;i++) {
210       const PetscInt *cols;
211       PetscInt       ncols,index_row = global_indices_r[i];
212       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
213       for (j=0;j<ncols;j++) {
214         PetscInt owner = row_ownership[index_row];
215         PetscInt index_col = global_indices_c[cols[j]];
216         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
217           my_dnz[i] += 1;
218         } else { /* offdiag block */
219           my_onz[i] += 1;
220         }
221         /* same as before, interchanging rows and cols */
222         if (issbaij && index_col != index_row) {
223           owner = row_ownership[index_col];
224           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
225             my_dnz[cols[j]] += 1;
226           } else {
227             my_onz[cols[j]] += 1;
228           }
229         }
230       }
231       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
232     }
233   }
234   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
235   if (global_indices_c != global_indices_r) {
236     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
237   }
238   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
239 
240   /* Reduce my_dnz and my_onz */
241   if (maxreduce) {
242     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
243     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
244     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
245     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
246   } else {
247     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
248     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
249     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
250     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
251   }
252   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
253 
254   /* Resize preallocation if overestimated */
255   for (i=0;i<lrows;i++) {
256     dnz[i] = PetscMin(dnz[i],lcols);
257     onz[i] = PetscMin(onz[i],cols-lcols);
258   }
259   /* set preallocation */
260   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
261   for (i=0;i<lrows/bs;i++) {
262     dnz[i] = dnz[i*bs]/bs;
263     onz[i] = onz[i*bs]/bs;
264   }
265   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
266   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
267   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
268   if (issbaij) {
269     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
270   }
271   PetscFunctionReturn(0);
272 }
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
276 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
277 {
278   Mat_IS         *matis = (Mat_IS*)(mat->data);
279   Mat            local_mat;
280   /* info on mat */
281   PetscInt       bs,rows,cols,lrows,lcols;
282   PetscInt       local_rows,local_cols;
283   PetscBool      isdense,issbaij,isseqaij;
284   PetscMPIInt    nsubdomains;
285   /* values insertion */
286   PetscScalar    *array;
287   /* work */
288   PetscErrorCode ierr;
289 
290   PetscFunctionBegin;
291   /* get info from mat */
292   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
293   if (nsubdomains == 1) {
294     if (reuse == MAT_INITIAL_MATRIX) {
295       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
296     } else {
297       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
298     }
299     PetscFunctionReturn(0);
300   }
301   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
302   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
303   ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
304   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
305   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
306   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
307   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
308 
309   if (reuse == MAT_INITIAL_MATRIX) {
310     MatType     new_mat_type;
311     PetscBool   issbaij_red;
312 
313     /* determining new matrix type */
314     ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
315     if (issbaij_red) {
316       new_mat_type = MATSBAIJ;
317     } else {
318       if (bs>1) {
319         new_mat_type = MATBAIJ;
320       } else {
321         new_mat_type = MATAIJ;
322       }
323     }
324 
325     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
326     ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
327     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
328     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
329     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
330   } else {
331     PetscInt mbs,mrows,mcols,mlrows,mlcols;
332     /* some checks */
333     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
334     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
335     ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
336     if (mrows != rows) {
337       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
338     }
339     if (mcols != cols) {
340       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
341     }
342     if (mlrows != lrows) {
343       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
344     }
345     if (mlcols != lcols) {
346       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
347     }
348     if (mbs != bs) {
349       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
350     }
351     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
352   }
353 
354   if (issbaij) {
355     ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
356   } else {
357     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
358     local_mat = matis->A;
359   }
360 
361   /* Set values */
362   ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
363   if (isdense) { /* special case for dense local matrices */
364     PetscInt i,*dummy_rows,*dummy_cols;
365 
366     if (local_rows != local_cols) {
367       ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
368       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
369       for (i=0;i<local_cols;i++) dummy_cols[i] = i;
370     } else {
371       ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
372       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
373       dummy_cols = dummy_rows;
374     }
375     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
376     ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
377     ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
378     ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
379     if (dummy_rows != dummy_cols) {
380       ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
381     } else {
382       ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
383     }
384   } else if (isseqaij) {
385     PetscInt  i,nvtxs,*xadj,*adjncy;
386     PetscBool done;
387 
388     ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
389     if (!done) {
390       SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
391     }
392     ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
393     for (i=0;i<nvtxs;i++) {
394       ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
395     }
396     ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
397     if (!done) {
398       SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
399     }
400     ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
401   } else { /* very basic values insertion for all other matrix types */
402     PetscInt i;
403 
404     for (i=0;i<local_rows;i++) {
405       PetscInt       j;
406       const PetscInt *local_indices_cols;
407 
408       ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
409       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
410       ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
411     }
412   }
413   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
414   ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
415   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
416   if (isdense) {
417     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
418   }
419   PetscFunctionReturn(0);
420 }
421 
422 #undef __FUNCT__
423 #define __FUNCT__ "MatISGetMPIXAIJ"
424 /*@
425     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
426 
  Input Parameters:
428 .  mat - the matrix (should be of type MATIS)
429 .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
430 
431   Output Parameter:
432 .  newmat - the matrix in AIJ format
433 
434   Level: developer
435 
436   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
437 
438 .seealso: MATIS
439 @*/
440 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
441 {
442   PetscErrorCode ierr;
443 
444   PetscFunctionBegin;
445   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
446   PetscValidLogicalCollectiveEnum(mat,reuse,2);
447   PetscValidPointer(newmat,3);
448   if (reuse != MAT_INITIAL_MATRIX) {
449     PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
450     PetscCheckSameComm(mat,1,*newmat,3);
451     if (mat == *newmat) {
452       SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
453     }
454   }
455   ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
456   PetscFunctionReturn(0);
457 }
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "MatDuplicate_IS"
461 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
462 {
463   PetscErrorCode ierr;
464   Mat_IS         *matis = (Mat_IS*)(mat->data);
465   PetscInt       bs,m,n,M,N;
466   Mat            B,localmat;
467 
468   PetscFunctionBegin;
469   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
470   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
471   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
472   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
473   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
474   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
475   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
476   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
477   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
478   *newmat = B;
479   PetscFunctionReturn(0);
480 }
481 
482 #undef __FUNCT__
483 #define __FUNCT__ "MatIsHermitian_IS"
484 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
485 {
486   PetscErrorCode ierr;
487   Mat_IS         *matis = (Mat_IS*)A->data;
488   PetscBool      local_sym;
489 
490   PetscFunctionBegin;
491   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
492   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
493   PetscFunctionReturn(0);
494 }
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatIsSymmetric_IS"
498 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
499 {
500   PetscErrorCode ierr;
501   Mat_IS         *matis = (Mat_IS*)A->data;
502   PetscBool      local_sym;
503 
504   PetscFunctionBegin;
505   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
506   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
507   PetscFunctionReturn(0);
508 }
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "MatDestroy_IS"
512 PetscErrorCode MatDestroy_IS(Mat A)
513 {
514   PetscErrorCode ierr;
515   Mat_IS         *b = (Mat_IS*)A->data;
516 
517   PetscFunctionBegin;
518   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
519   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
520   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
521   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
522   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
523   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
524   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
525   ierr = PetscFree(A->data);CHKERRQ(ierr);
526   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
527   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
528   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
529   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
530   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
531   PetscFunctionReturn(0);
532 }
533 
534 #undef __FUNCT__
535 #define __FUNCT__ "MatMult_IS"
536 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
537 {
538   PetscErrorCode ierr;
539   Mat_IS         *is  = (Mat_IS*)A->data;
540   PetscScalar    zero = 0.0;
541 
542   PetscFunctionBegin;
543   /*  scatter the global vector x into the local work vector */
544   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
545   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
546 
547   /* multiply the local matrix */
548   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
549 
550   /* scatter product back into global memory */
551   ierr = VecSet(y,zero);CHKERRQ(ierr);
552   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
553   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
554   PetscFunctionReturn(0);
555 }
556 
557 #undef __FUNCT__
558 #define __FUNCT__ "MatMultAdd_IS"
559 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
560 {
561   Vec            temp_vec;
562   PetscErrorCode ierr;
563 
564   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
565   if (v3 != v2) {
566     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
567     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
568   } else {
569     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
570     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
571     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
572     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
573     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
574   }
575   PetscFunctionReturn(0);
576 }
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "MatMultTranspose_IS"
580 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
581 {
582   Mat_IS         *is = (Mat_IS*)A->data;
583   PetscErrorCode ierr;
584 
585   PetscFunctionBegin;
586   /*  scatter the global vector x into the local work vector */
587   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
588   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
589 
590   /* multiply the local matrix */
591   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
592 
593   /* scatter product back into global vector */
594   ierr = VecSet(x,0);CHKERRQ(ierr);
595   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
596   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "MatMultTransposeAdd_IS"
602 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
603 {
604   Vec            temp_vec;
605   PetscErrorCode ierr;
606 
607   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
608   if (v3 != v2) {
609     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
610     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
611   } else {
612     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
613     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
614     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
615     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
616     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
617   }
618   PetscFunctionReturn(0);
619 }
620 
621 #undef __FUNCT__
622 #define __FUNCT__ "MatView_IS"
623 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
624 {
625   Mat_IS         *a = (Mat_IS*)A->data;
626   PetscErrorCode ierr;
627   PetscViewer    sviewer;
628 
629   PetscFunctionBegin;
630   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
631   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
632   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
633   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
634   PetscFunctionReturn(0);
635 }
636 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Sets (or resets) the row/column local-to-global mappings of a MATIS.
   This (re)creates the local matrix, the local work vectors and the
   global-to-local scatters used by the matrix-vector products. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential nr x nc matrix with the
     mappings' sizes and block sizes, customizable via the "is_" prefix */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local index i corresponds to
     global index rmapping(i) (resp. cmapping(i)) */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* identical mappings: share the row scatter for columns too */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "MatSetValues_IS"
706 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
707 {
708   Mat_IS         *is = (Mat_IS*)mat->data;
709   PetscInt       rows_l[2048],cols_l[2048];
710   PetscErrorCode ierr;
711 
712   PetscFunctionBegin;
713 #if defined(PETSC_USE_DEBUG)
714   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
715 #endif
716   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
717   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
718   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
719   PetscFunctionReturn(0);
720 }
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "MatSetValuesLocal_IS"
724 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
725 {
726   PetscErrorCode ierr;
727   Mat_IS         *is = (Mat_IS*)A->data;
728 
729   PetscFunctionBegin;
730   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
736 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
737 {
738   PetscErrorCode ierr;
739   Mat_IS         *is = (Mat_IS*)A->data;
740 
741   PetscFunctionBegin;
742   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
743   PetscFunctionReturn(0);
744 }
745 
746 #undef __FUNCT__
747 #define __FUNCT__ "MatZeroRows_IS"
748 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
749 {
750   PetscInt       n_l = 0, *rows_l = NULL;
751   PetscErrorCode ierr;
752 
753   PetscFunctionBegin;
754   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
755   if (n) {
756     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
757     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
758   }
759   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
760   ierr = PetscFree(rows_l);CHKERRQ(ierr);
761   PetscFunctionReturn(0);
762 }
763 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   Zeroes rows of the local matrix (indices in LOCAL numbering) and puts
   diag on their diagonal.  Since a row shared among subdomains contributes
   once per subdomain to the implicitly assembled operator, the diagonal
   entry set locally is diag divided by the multiplicity of the dof, so the
   assembled operator sees exactly diag there.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector": after the two scatters below,
       is->y[i] holds the number of subdomains sharing local dof i.  This is
       needed for MatMult_IS to work properly on the interface nodes.
       (The previous comment said is->x; the code operates on is->y.)
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    /* accumulate the per-subdomain ones into the global counter ... */
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* ... and bring the multiplicities back into the local work vector */
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows locally, then set diag scaled by the dof multiplicity
       so that shared rows sum to diag across subdomains */
    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "MatAssemblyBegin_IS"
809 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
810 {
811   Mat_IS         *is = (Mat_IS*)A->data;
812   PetscErrorCode ierr;
813 
814   PetscFunctionBegin;
815   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
816   PetscFunctionReturn(0);
817 }
818 
819 #undef __FUNCT__
820 #define __FUNCT__ "MatAssemblyEnd_IS"
821 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
822 {
823   Mat_IS         *is = (Mat_IS*)A->data;
824   PetscErrorCode ierr;
825 
826   PetscFunctionBegin;
827   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830 
831 #undef __FUNCT__
832 #define __FUNCT__ "MatISGetLocalMat_IS"
833 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
834 {
835   Mat_IS *is = (Mat_IS*)mat->data;
836 
837   PetscFunctionBegin;
838   *local = is->A;
839   PetscFunctionReturn(0);
840 }
841 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The reference count of the returned matrix is not increased, so the
  caller should not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the type-specific implementation composed on the object */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
872 
873 #undef __FUNCT__
874 #define __FUNCT__ "MatISSetLocalMat_IS"
875 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
876 {
877   Mat_IS         *is = (Mat_IS*)mat->data;
878   PetscInt       nrows,ncols,orows,ocols;
879   PetscErrorCode ierr;
880 
881   PetscFunctionBegin;
882   if (is->A) {
883     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
884     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
885     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
886   }
887   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
888   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
889   is->A = local;
890   PetscFunctionReturn(0);
891 }
892 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The object takes its own reference on local, so the caller keeps
  ownership of (and should eventually destroy) its own reference.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the type-specific implementation composed on the object */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
922 
923 #undef __FUNCT__
924 #define __FUNCT__ "MatZeroEntries_IS"
925 PetscErrorCode MatZeroEntries_IS(Mat A)
926 {
927   Mat_IS         *a = (Mat_IS*)A->data;
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 #undef __FUNCT__
936 #define __FUNCT__ "MatScale_IS"
937 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
938 {
939   Mat_IS         *is = (Mat_IS*)A->data;
940   PetscErrorCode ierr;
941 
942   PetscFunctionBegin;
943   ierr = MatScale(is->A,a);CHKERRQ(ierr);
944   PetscFunctionReturn(0);
945 }
946 
947 #undef __FUNCT__
948 #define __FUNCT__ "MatGetDiagonal_IS"
949 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
950 {
951   Mat_IS         *is = (Mat_IS*)A->data;
952   PetscErrorCode ierr;
953 
954   PetscFunctionBegin;
955   /* get diagonal of the local matrix */
956   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
957 
958   /* scatter diagonal back into global vector */
959   ierr = VecSet(v,0);CHKERRQ(ierr);
960   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
961   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "MatSetOption_IS"
967 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
968 {
969   Mat_IS         *a = (Mat_IS*)A->data;
970   PetscErrorCode ierr;
971 
972   PetscFunctionBegin;
973   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* when only one mapping is given, use it for both rows and columns (square case) */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1022 
1023 /*MC
1024    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1025    This stores the matrices in globally unassembled form. Each processor
1026    assembles only its local Neumann problem and the parallel matrix vector
1027    product is handled "implicitly".
1028 
1029    Operations Provided:
1030 +  MatMult()
1031 .  MatMultAdd()
1032 .  MatMultTranspose()
1033 .  MatMultTransposeAdd()
1034 .  MatZeroEntries()
1035 .  MatSetOption()
1036 .  MatZeroRows()
1037 .  MatZeroRowsLocal()
1038 .  MatSetValues()
1039 .  MatSetValuesLocal()
1040 .  MatScale()
1041 .  MatGetDiagonal()
1042 -  MatSetLocalToGlobalMapping()
1043 
1044    Options Database Keys:
1045 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1046 
   Notes: the options prefix for the inner matrix is given by -is_mat_xxx
1048 
1049           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1050 
1051           You can do matrix preallocation on the local matrix after you obtain it with
1052           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1053 
1054   Level: advanced
1055 
1056 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1057 
1058 M*/
1059 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MATIS constructor: allocates the private Mat_IS data, installs the
   operations implemented by this class, and composes the MATIS-specific
   functions used by the MatIS* public API.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: zero the whole table first so any operation not explicitly
     set below stays NULL (i.e. unimplemented for this type) */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions: composed on the object so the MatIS* entry
     points can dispatch to them via PetscUseMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1103