xref: /petsc/src/mat/impls/is/matis.c (revision d9a9e74cf33a2d7ecae4eb3f7ac582653bd9dec1)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatISComputeSF_Private"
21 static PetscErrorCode MatISComputeSF_Private(Mat B)
22 {
23   Mat_IS         *matis = (Mat_IS*)(B->data);
24   const PetscInt *gidxs;
25   PetscErrorCode ierr;
26 
27   PetscFunctionBegin;
28   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
29   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
30   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
31   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
32   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
33   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
34   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
35   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
41 /*
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* Dispatch to the implementation composed on B (MatISSetPreallocation_IS for
     MATIS); PetscTryMethod is a no-op for matrix types that do not compose it */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatISSetPreallocation_IS"
/* MATIS implementation of MatISSetPreallocation: sums the diagonal and
   off-diagonal per-row estimates at the owning processes, broadcasts them to
   the local (subdomain) rows through the SF, and preallocates the local
   matrix for the AIJ, BAIJ and SBAIJ formats. Generally an overestimate. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* per owned row: total estimate = diagonal part ... */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  /* ... plus off-diagonal part */
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* broadcast row estimates to the subdomain numbering; the local size/blocksize
     queries are overlapped with the communication */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress per-row counts into per-block-row block counts for BAIJ
     (destroys the first sf_nleaves/bs entries of sf_leafdata) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only: discount the estimated lower part */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Computes and sets the preallocation of the assembled parallel matrix B
   (AIJ, BAIJ and SBAIJ formats) from the nonzero pattern of the unassembled
   MATIS matrix A. If maxreduce is true, contributions from different
   subdomains to a shared row are combined with MPI_MAX instead of summed. */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt* global_indices;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only; make MatGetRow below see it */
  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared interface dofs.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  /* dnz/onz are allocated (and zeroed) by the preallocation macro pair */
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j]=i;
    }
  }

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* every (i,j) pair with j>=i contributes; symmetry is exploited by also
       counting the transposed entry (j,i) in the same pass */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else {
    /* sparse local matrix: walk the stored pattern row by row */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols; only needed for SBAIJ,
           whose stored pattern covers the upper triangle only */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);
  /* combine subdomain contributions at the owning processes: MPI_MAX keeps the
     largest single-subdomain count, MPI_SUM accumulates them all */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress per-row counts into per-block-row counts for BAIJ (in place) */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only: discount the estimated lower part */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i]-i;
  }
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assembles the unassembled MATIS matrix mat into the parallel matrix *M,
   with subdomain contributions added together on shared rows. The target
   type (AIJ/BAIJ/SBAIJ) is chosen from the local matrix types and the block
   size. With MAT_REUSE_MATRIX, *M must match sizes and block size. */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  const PetscInt *global_indices_rows;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
    - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* uniprocessor case: the local matrix already is the assembled matrix */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  /* map indices of local mat rows to global values */
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every process has an SBAIJ
       local matrix, otherwise (B)AIJ depending on the block size */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match the MATIS layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    /* values are added below, so the reused matrix must start from zero */
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ local matrices are converted so full rows can be read; otherwise
     take a reference so the single MatDestroy at the end is always correct */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  if (isdense) { /* special case for dense local matrices */
    /* dense storage is column major, hence MAT_ROW_ORIENTED is turned off */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else if (isseqaij) {
    /* fast path: insert the CSR arrays row by row without copies */
    PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      /* NOTE(review): message mentions MatRestoreRowIJ but this checks MatGetRowIJ */
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    /* map all column indices to global numbering in one shot */
    ierr = PetscMalloc1(xadj[nvtxs],&global_indices_cols);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(matis->mapping,xadj[nvtxs],adjncy,global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      PetscInt    row,*cols,ncols;
      PetscScalar *mat_vals;

      row = global_indices_rows[i];
      ncols = xadj[i+1]-xadj[i];
      cols = global_indices_cols+xadj[i];
      mat_vals = array+xadj[i];
      ierr = MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i,*global_indices_cols;

    ierr = PetscMalloc1(local_cols,&global_indices_cols);CHKERRQ(ierr);
    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices;

      ierr = MatGetRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices_cols);CHKERRQ(ierr);
      ierr = MatSetValues(*M,1,&global_indices_rows[i],j,global_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
    ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
  }
  /* local cleanups are overlapped with the assembly communication */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
417 
418 #undef __FUNCT__
419 #define __FUNCT__ "MatISGetMPIXAIJ"
420 /*@
421     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
422 
  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
426 
427   Output Parameter:
428 .  newmat - the matrix in AIJ format
429 
430   Level: developer
431 
432   Notes:
433 
434 .seealso: MATIS
435 @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, *newmat must already be a valid matrix on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  /* dispatch to the composed implementation (MatISGetMPIXAIJ_IS for MATIS);
     unlike PetscTryMethod, PetscUseMethod errors if mat does not provide it */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
451 
452 #undef __FUNCT__
453 #define __FUNCT__ "MatDuplicate_IS"
454 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
455 {
456   PetscErrorCode ierr;
457   Mat_IS         *matis = (Mat_IS*)(mat->data);
458   PetscInt       bs,m,n,M,N;
459   Mat            B,localmat;
460 
461   PetscFunctionBegin;
462   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
463   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
464   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
465   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
466   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
467   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
468   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
469   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
470   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
471   *newmat = B;
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "MatIsHermitian_IS"
477 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
478 {
479   PetscErrorCode ierr;
480   Mat_IS         *matis = (Mat_IS*)A->data;
481   PetscBool      local_sym;
482 
483   PetscFunctionBegin;
484   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
485   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
486   PetscFunctionReturn(0);
487 }
488 
489 #undef __FUNCT__
490 #define __FUNCT__ "MatIsSymmetric_IS"
491 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
492 {
493   PetscErrorCode ierr;
494   Mat_IS         *matis = (Mat_IS*)A->data;
495   PetscBool      local_sym;
496 
497   PetscFunctionBegin;
498   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
499   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
500   PetscFunctionReturn(0);
501 }
502 
503 #undef __FUNCT__
504 #define __FUNCT__ "MatDestroy_IS"
505 PetscErrorCode MatDestroy_IS(Mat A)
506 {
507   PetscErrorCode ierr;
508   Mat_IS         *b = (Mat_IS*)A->data;
509 
510   PetscFunctionBegin;
511   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
512   ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
513   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
514   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
515   ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
516   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
517   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
518   ierr = PetscFree(A->data);CHKERRQ(ierr);
519   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
520   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
521   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
522   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
523   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
524   PetscFunctionReturn(0);
525 }
526 
527 #undef __FUNCT__
528 #define __FUNCT__ "MatMult_IS"
529 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
530 {
531   PetscErrorCode ierr;
532   Mat_IS         *is  = (Mat_IS*)A->data;
533   PetscScalar    zero = 0.0;
534 
535   PetscFunctionBegin;
536   /*  scatter the global vector x into the local work vector */
537   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
538   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
539 
540   /* multiply the local matrix */
541   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
542 
543   /* scatter product back into global memory */
544   ierr = VecSet(y,zero);CHKERRQ(ierr);
545   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
546   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
547   PetscFunctionReturn(0);
548 }
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "MatMultAdd_IS"
552 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
553 {
554   Vec            temp_vec;
555   PetscErrorCode ierr;
556 
557   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
558   if (v3 != v2) {
559     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
560     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
561   } else {
562     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
563     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
564     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
565     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
566     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
567   }
568   PetscFunctionReturn(0);
569 }
570 
571 #undef __FUNCT__
572 #define __FUNCT__ "MatMultTranspose_IS"
573 PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
574 {
575   Mat_IS         *is = (Mat_IS*)A->data;
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin; /*  y = A' * x */
579   /*  scatter the global vector x into the local work vector */
580   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
581   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
582 
583   /* multiply the local matrix */
584   ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);
585 
586   /* scatter product back into global vector */
587   ierr = VecSet(y,0);CHKERRQ(ierr);
588   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
589   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "MatMultTransposeAdd_IS"
595 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
596 {
597   Vec            temp_vec;
598   PetscErrorCode ierr;
599 
600   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
601   if (v3 != v2) {
602     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
603     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
604   } else {
605     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
606     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
607     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
608     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
609     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
610   }
611   PetscFunctionReturn(0);
612 }
613 
614 #undef __FUNCT__
615 #define __FUNCT__ "MatView_IS"
616 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
617 {
618   Mat_IS         *a = (Mat_IS*)A->data;
619   PetscErrorCode ierr;
620   PetscViewer    sviewer;
621 
622   PetscFunctionBegin;
623   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
624   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
625   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
626   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
627   PetscFunctionReturn(0);
628 }
629 
630 #undef __FUNCT__
631 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the local-to-global mapping on a MATIS matrix and (re)builds
   everything that depends on it: the local matrix A, the local work vectors
   x/y, and the global-to-local scatter used by the MatMult routines.
   Row and column mappings must be identical (square, symmetric layout). */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  /* take ownership of the mapping */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  /* default local type follows the mapping's block size; overridable via -is_ options */
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter: local entry i <- global entry mapping(i) */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
694 
695 #undef __FUNCT__
696 #define __FUNCT__ "MatSetValues_IS"
/* Inserts values given in GLOBAL numbering by converting the indices to the
   local (subdomain) numbering and forwarding to the local matrix.
   NOTE(review): ISG2LMapApply (and ISG2LMapSetUp, #undef'd below) are helper
   macros whose definitions are not visible in this chunk — presumably they
   apply the inverse of is->mapping; verify against the header/earlier lines. */
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  /* fixed-size stack buffers bound the number of indices accepted per call */
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
712 
713 #undef ISG2LMapSetUp
714 #undef ISG2LMapApply
715 
716 #undef __FUNCT__
717 #define __FUNCT__ "MatSetValuesLocal_IS"
718 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
719 {
720   PetscErrorCode ierr;
721   Mat_IS         *is = (Mat_IS*)A->data;
722 
723   PetscFunctionBegin;
724   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
725   PetscFunctionReturn(0);
726 }
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
730 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
731 {
732   PetscErrorCode ierr;
733   Mat_IS         *is = (Mat_IS*)A->data;
734 
735   PetscFunctionBegin;
736   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
737   PetscFunctionReturn(0);
738 }
739 
740 #undef __FUNCT__
741 #define __FUNCT__ "MatZeroRows_IS"
742 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
743 {
744   Mat_IS         *is = (Mat_IS*)A->data;
745   PetscInt       n_l = 0, *rows_l = NULL;
746   PetscErrorCode ierr;
747 
748   PetscFunctionBegin;
749   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
750   if (n) {
751     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
752     ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
753   }
754   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
755   ierr = PetscFree(rows_l);CHKERRQ(ierr);
756   PetscFunctionReturn(0);
757 }
758 
759 #undef __FUNCT__
760 #define __FUNCT__ "MatZeroRowsLocal_IS"
761 PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
762 {
763   Mat_IS         *is = (Mat_IS*)A->data;
764   PetscErrorCode ierr;
765   PetscInt       i;
766   PetscScalar    *array;
767 
768   PetscFunctionBegin;
769   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
770   {
771     /*
772        Set up is->x as a "counting vector". This is in order to MatMult_IS
773        work properly in the interface nodes.
774     */
775     Vec         counter;
776     PetscScalar one=1.0, zero=0.0;
777     ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
778     ierr = VecSet(counter,zero);CHKERRQ(ierr);
779     ierr = VecSet(is->x,one);CHKERRQ(ierr);
780     ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
781     ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
782     ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
783     ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
784     ierr = VecDestroy(&counter);CHKERRQ(ierr);
785   }
786   if (!n) {
787     is->pure_neumann = PETSC_TRUE;
788   } else {
789     is->pure_neumann = PETSC_FALSE;
790 
791     ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
792     ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
793     for (i=0; i<n; i++) {
794       ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
795     }
796     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
797     ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
798     ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
799   }
800   PetscFunctionReturn(0);
801 }
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "MatAssemblyBegin_IS"
805 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
806 {
807   Mat_IS         *is = (Mat_IS*)A->data;
808   PetscErrorCode ierr;
809 
810   PetscFunctionBegin;
811   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
812   PetscFunctionReturn(0);
813 }
814 
815 #undef __FUNCT__
816 #define __FUNCT__ "MatAssemblyEnd_IS"
817 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
818 {
819   Mat_IS         *is = (Mat_IS*)A->data;
820   PetscErrorCode ierr;
821 
822   PetscFunctionBegin;
823   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
824   PetscFunctionReturn(0);
825 }
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "MatISGetLocalMat_IS"
829 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
830 {
831   Mat_IS *is = (Mat_IS*)mat->data;
832 
833   PetscFunctionBegin;
834   *local = is->A;
835   PetscFunctionReturn(0);
836 }
837 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The returned matrix is a borrowed reference: do not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the type-specific implementation composed on the object */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
868 
869 #undef __FUNCT__
870 #define __FUNCT__ "MatISSetLocalMat_IS"
871 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
872 {
873   Mat_IS         *is = (Mat_IS*)mat->data;
874   PetscInt       nrows,ncols,orows,ocols;
875   PetscErrorCode ierr;
876 
877   PetscFunctionBegin;
878   if (is->A) {
879     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
880     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
881     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
882   }
883   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
884   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
885   is->A = local;
886   PetscFunctionReturn(0);
887 }
888 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The local matrix is referenced, so the caller retains ownership of its
  own reference.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the type-specific implementation composed on the object */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
918 
919 #undef __FUNCT__
920 #define __FUNCT__ "MatZeroEntries_IS"
921 PetscErrorCode MatZeroEntries_IS(Mat A)
922 {
923   Mat_IS         *a = (Mat_IS*)A->data;
924   PetscErrorCode ierr;
925 
926   PetscFunctionBegin;
927   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
928   PetscFunctionReturn(0);
929 }
930 
931 #undef __FUNCT__
932 #define __FUNCT__ "MatScale_IS"
933 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
934 {
935   Mat_IS         *is = (Mat_IS*)A->data;
936   PetscErrorCode ierr;
937 
938   PetscFunctionBegin;
939   ierr = MatScale(is->A,a);CHKERRQ(ierr);
940   PetscFunctionReturn(0);
941 }
942 
943 #undef __FUNCT__
944 #define __FUNCT__ "MatGetDiagonal_IS"
945 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
946 {
947   Mat_IS         *is = (Mat_IS*)A->data;
948   PetscErrorCode ierr;
949 
950   PetscFunctionBegin;
951   /* get diagonal of the local matrix */
952   ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);
953 
954   /* scatter diagonal back into global vector */
955   ierr = VecSet(v,0);CHKERRQ(ierr);
956   ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
957   ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
958   PetscFunctionReturn(0);
959 }
960 
961 #undef __FUNCT__
962 #define __FUNCT__ "MatSetOption_IS"
963 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
964 {
965   Mat_IS         *a = (Mat_IS*)A->data;
966   PetscErrorCode ierr;
967 
968   PetscFunctionBegin;
969   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
970   PetscFunctionReturn(0);
971 }
972 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm - MPI communicator that will share the matrix
.     bs - local and global block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
-     map - mapping that defines the global number for each local number

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
          plus the ghost points to global indices.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* the same mapping is used for both rows and columns */
  ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1010 
1011 /*MC
1012    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
1013    This stores the matrices in globally unassembled form. Each processor
1014    assembles only its local Neumann problem and the parallel matrix vector
1015    product is handled "implicitly".
1016 
1017    Operations Provided:
1018 +  MatMult()
1019 .  MatMultAdd()
1020 .  MatMultTranspose()
1021 .  MatMultTransposeAdd()
1022 .  MatZeroEntries()
1023 .  MatSetOption()
1024 .  MatZeroRows()
1025 .  MatZeroRowsLocal()
1026 .  MatSetValues()
1027 .  MatSetValuesLocal()
1028 .  MatScale()
1029 .  MatGetDiagonal()
1030 -  MatSetLocalToGlobalMapping()
1031 
1032    Options Database Keys:
1033 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1034 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1036 
1037           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1038 
1039           You can do matrix preallocation on the local matrix after you obtain it with
1040           MatISGetLocalMat()
1041 
1042   Level: advanced
1043 
1044 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC
1045 
1046 M*/
1047 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor for the MATIS matrix type: allocates the
   Mat_IS data, installs the method table, and composes the MATIS-specific
   functions on the object.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  /* clear the whole ops table before installing the supported operations */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);

  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);

  /* zeros pointer: members are created later (e.g. by MatSetLocalToGlobalMapping) */
  b->A           = 0;
  b->ctx         = 0;
  b->x           = 0;
  b->y           = 0;
  b->mapping     = 0;
  b->sf          = 0;
  b->sf_rootdata = 0;
  b->sf_leafdata = 0;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1103