xref: /petsc/src/mat/impls/is/matis.c (revision c0962df8a5994a6ff07e429209b5729e79c2950a)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatISComputeSF_Private"
21 static PetscErrorCode MatISComputeSF_Private(Mat B)
22 {
23   Mat_IS         *matis = (Mat_IS*)(B->data);
24   const PetscInt *gidxs;
25   PetscErrorCode ierr;
26 
27   PetscFunctionBegin;
28   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
29   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
30   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
31   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
32   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
33   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
34   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
35   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
/*@
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* Dispatch to the type-specific implementation (MatISSetPreallocation_IS for
     MATIS); PetscTryMethod is a no-op if the method is not composed on B, so
     calling this on a non-MATIS matrix is harmless. */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MatISSetPreallocation_IS - MATIS implementation of MatISSetPreallocation().

   Takes MPIAIJ-style per-owned-row counts (d_nz/d_nnz, o_nz/o_nnz), sums the
   diagonal and off-diagonal counts per owned global row, broadcasts them over
   the SF to the rows of each subdomain (local) matrix, and preallocates the
   local matrix in all three sequential formats (AIJ, BAIJ, SBAIJ) so that the
   call works whatever type the local matrix actually is. The resulting
   preallocation is an overestimate (see the manual page above).
*/
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total expected nonzeros (diag + offdiag) in owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* push the counts to the local-matrix rows; overlap the communication with
     the size/block-size queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* rescale leafdata in place to per-block-row block counts for BAIJ; safe
     in place since i*bs >= i, so each read is at or ahead of the write */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* for SBAIJ only the upper triangle is stored; subtracting i from block row i
     presumably discounts the (at most i) blocks below the diagonal — TODO confirm
     this adjustment against the SBAIJ preallocation semantics */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
133 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
134 {
135   Mat_IS          *matis = (Mat_IS*)(A->data);
136   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
137   const PetscInt* global_indices;
138   PetscInt        i,j,bs,rows,cols;
139   PetscInt        lrows,lcols;
140   PetscInt        local_rows,local_cols;
141   PetscMPIInt     nsubdomains;
142   PetscBool       isdense,issbaij;
143   PetscErrorCode  ierr;
144 
145   PetscFunctionBegin;
146   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
147   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
150   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
151   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
152   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
153   if (issbaij) {
154     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
155   }
156   /*
157      An SF reduce is needed to sum up properly on shared interface dofs.
158      Note that generally preallocation is not exact, since it overestimates nonzeros
159   */
160   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
161     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
162   }
163   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
164   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
165   /* All processes need to compute entire row ownership */
166   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
167   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
168   for (i=0;i<nsubdomains;i++) {
169     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
170       row_ownership[j]=i;
171     }
172   }
173 
174   /*
175      my_dnz and my_onz contains exact contribution to preallocation from each local mat
176      then, they will be summed up properly. This way, preallocation is always sufficient
177   */
178   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
179   /* preallocation as a MATAIJ */
180   if (isdense) { /* special case for dense local matrices */
181     for (i=0;i<local_rows;i++) {
182       PetscInt index_row = global_indices[i];
183       for (j=i;j<local_rows;j++) {
184         PetscInt owner = row_ownership[index_row];
185         PetscInt index_col = global_indices[j];
186         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
187           my_dnz[i] += 1;
188         } else { /* offdiag block */
189           my_onz[i] += 1;
190         }
191         /* same as before, interchanging rows and cols */
192         if (i != j) {
193           owner = row_ownership[index_col];
194           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
195             my_dnz[j] += 1;
196           } else {
197             my_onz[j] += 1;
198           }
199         }
200       }
201     }
202   } else {
203     for (i=0;i<local_rows;i++) {
204       const PetscInt *cols;
205       PetscInt       ncols,index_row = global_indices[i];
206       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
207       for (j=0;j<ncols;j++) {
208         PetscInt owner = row_ownership[index_row];
209         PetscInt index_col = global_indices[cols[j]];
210         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
211           my_dnz[i] += 1;
212         } else { /* offdiag block */
213           my_onz[i] += 1;
214         }
215         /* same as before, interchanging rows and cols */
216         if (issbaij) {
217           owner = row_ownership[index_col];
218           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
219             my_dnz[j] += 1;
220           } else {
221             my_onz[j] += 1;
222           }
223         }
224       }
225       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
226     }
227   }
228   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices);CHKERRQ(ierr);
229   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
230   if (maxreduce) {
231     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
232     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
233     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
234     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
235   } else {
236     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
237     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
238     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
239     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
240   }
241   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
242 
243   /* Resize preallocation if overestimated */
244   for (i=0;i<lrows;i++) {
245     dnz[i] = PetscMin(dnz[i],lcols);
246     onz[i] = PetscMin(onz[i],cols-lcols);
247   }
248   /* set preallocation */
249   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
250   for (i=0;i<lrows/bs;i++) {
251     dnz[i] = dnz[i*bs]/bs;
252     onz[i] = onz[i*bs]/bs;
253   }
254   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
255   for (i=0;i<lrows/bs;i++) {
256     dnz[i] = dnz[i]-i;
257   }
258   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
259   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
260   if (issbaij) {
261     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
262   }
263   PetscFunctionReturn(0);
264 }
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
268 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
269 {
270   Mat_IS                 *matis = (Mat_IS*)(mat->data);
271   /* info on mat */
272   /* ISLocalToGlobalMapping rmapping,cmapping; */
273   PetscInt               bs,rows,cols;
274   PetscInt               local_rows,local_cols;
275   PetscBool              isdense,issbaij,isseqaij;
276   const PetscInt         *global_indices_rows;
277   PetscMPIInt            nsubdomains;
278   /* values insertion */
279   PetscScalar            *array;
280   /* work */
281   PetscErrorCode         ierr;
282 
283   PetscFunctionBegin;
284   /* MISSING CHECKS
285     - rectangular case not covered (it is not allowed by MATIS)
286   */
287   /* get info from mat */
288   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
289   if (nsubdomains == 1) {
290     if (reuse == MAT_INITIAL_MATRIX) {
291       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
292     } else {
293       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
294     }
295     PetscFunctionReturn(0);
296   }
297   /* ierr = MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping);CHKERRQ(ierr); */
298   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
299   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
300   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
301   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
302   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
303   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
304 
305   /* map indices of local mat rows to global values */
306   ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);
307 
308   if (issbaij) {
309     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
310   }
311 
312   if (reuse == MAT_INITIAL_MATRIX) {
313     MatType     new_mat_type;
314     PetscBool   issbaij_red;
315 
316     /* determining new matrix type */
317     ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
318     if (issbaij_red) {
319       new_mat_type = MATSBAIJ;
320     } else {
321       if (bs>1) {
322         new_mat_type = MATBAIJ;
323       } else {
324         new_mat_type = MATAIJ;
325       }
326     }
327 
328     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
329     ierr = MatSetSizes(*M,PETSC_DECIDE,PETSC_DECIDE,rows,cols);CHKERRQ(ierr);
330     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
331     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
332     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
333   } else {
334     PetscInt mbs,mrows,mcols;
335     /* some checks */
336     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
337     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
338     if (mrows != rows) {
339       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
340     }
341     if (mrows != rows) {
342       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
343     }
344     if (mbs != bs) {
345       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
346     }
347     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
348   }
349   /* set local to global mappings */
350   /* ierr = MatSetLocalToGlobalMapping(*M,matis->mapping,matis->mapping);CHKERRQ(ierr); */
351 
352   /* Set values */
353   if (isdense) { /* special case for dense local matrices */
354     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
355     ierr = MatDenseGetArray(matis->A,&array);CHKERRQ(ierr);
356     ierr = MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);CHKERRQ(ierr);
357     ierr = MatDenseRestoreArray(matis->A,&array);CHKERRQ(ierr);
358   } else if (isseqaij) {
359     PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
360     PetscBool done;
361 
362     ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
363     if (!done) {
364       SETERRQ1(PetscObjectComm((PetscObject)matis->A),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
365     }
366     ierr = MatSeqAIJGetArray(matis->A,&array);CHKERRQ(ierr);
367     ierr = PetscMalloc1(xadj[nvtxs],&global_indices_cols);CHKERRQ(ierr);
368     ierr = ISLocalToGlobalMappingApply(matis->mapping,xadj[nvtxs],adjncy,global_indices_cols);CHKERRQ(ierr);
369     for (i=0;i<nvtxs;i++) {
370       PetscInt    row,*cols,ncols;
371       PetscScalar *mat_vals;
372 
373       row = global_indices_rows[i];
374       ncols = xadj[i+1]-xadj[i];
375       cols = global_indices_cols+xadj[i];
376       mat_vals = array+xadj[i];
377       ierr = MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);CHKERRQ(ierr);
378     }
379     ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
380     if (!done) {
381       SETERRQ1(PetscObjectComm((PetscObject)matis->A),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
382     }
383     ierr = MatSeqAIJRestoreArray(matis->A,&array);CHKERRQ(ierr);
384     ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
385   } else { /* very basic values insertion for all other matrix types */
386     PetscInt i,*global_indices_cols;
387 
388     ierr = PetscMalloc1(local_cols,&global_indices_cols);CHKERRQ(ierr);
389     for (i=0;i<local_rows;i++) {
390       PetscInt       j;
391       const PetscInt *local_indices;
392 
393       ierr = MatGetRow(matis->A,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
394       ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices_cols);CHKERRQ(ierr);
395       ierr = MatSetValues(*M,1,&global_indices_rows[i],j,global_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
396       ierr = MatRestoreRow(matis->A,i,&j,&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr);
397     }
398     ierr = PetscFree(global_indices_cols);CHKERRQ(ierr);
399   }
400   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
401   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
402   if (isdense) {
403     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
404   }
405   if (issbaij) {
406     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
407   }
408   ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices_rows);CHKERRQ(ierr);
409   PetscFunctionReturn(0);
410 }
411 
412 #undef __FUNCT__
413 #define __FUNCT__ "MatISGetMPIXAIJ"
414 /*@
415     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
416 
  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
420 
421   Output Parameter:
422 .  newmat - the matrix in AIJ format
423 
424   Level: developer
425 
426   Notes:
427 
428 .seealso: MATIS
429 @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, *newmat must already be a valid matrix on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  /* dispatch to the MATIS implementation; PetscUseMethod errors out if the
     method is not composed on mat (i.e. mat is not of type MATIS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "MatDuplicate_IS"
448 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
449 {
450   PetscErrorCode ierr;
451   Mat_IS         *matis = (Mat_IS*)(mat->data);
452   PetscInt       bs,m,n,M,N;
453   Mat            B,localmat;
454 
455   PetscFunctionBegin;
456   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
457   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
458   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
459   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
460   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
461   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
462   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
463   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
464   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
465   *newmat = B;
466   PetscFunctionReturn(0);
467 }
468 
469 #undef __FUNCT__
470 #define __FUNCT__ "MatIsHermitian_IS"
471 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
472 {
473   PetscErrorCode ierr;
474   Mat_IS         *matis = (Mat_IS*)A->data;
475   PetscBool      local_sym;
476 
477   PetscFunctionBegin;
478   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
479   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "MatIsSymmetric_IS"
485 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
486 {
487   PetscErrorCode ierr;
488   Mat_IS         *matis = (Mat_IS*)A->data;
489   PetscBool      local_sym;
490 
491   PetscFunctionBegin;
492   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
493   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "MatDestroy_IS"
499 PetscErrorCode MatDestroy_IS(Mat A)
500 {
501   PetscErrorCode ierr;
502   Mat_IS         *b = (Mat_IS*)A->data;
503 
504   PetscFunctionBegin;
505   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
506   ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
507   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
508   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
509   ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
510   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
511   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
512   ierr = PetscFree(A->data);CHKERRQ(ierr);
513   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
514   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
515   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
516   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
517   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
518   PetscFunctionReturn(0);
519 }
520 
521 #undef __FUNCT__
522 #define __FUNCT__ "MatMult_IS"
523 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
524 {
525   PetscErrorCode ierr;
526   Mat_IS         *is  = (Mat_IS*)A->data;
527   PetscScalar    zero = 0.0;
528 
529   PetscFunctionBegin;
530   /*  scatter the global vector x into the local work vector */
531   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
532   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
533 
534   /* multiply the local matrix */
535   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
536 
537   /* scatter product back into global memory */
538   ierr = VecSet(y,zero);CHKERRQ(ierr);
539   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
540   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
541   PetscFunctionReturn(0);
542 }
543 
544 #undef __FUNCT__
545 #define __FUNCT__ "MatMultAdd_IS"
546 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
547 {
548   Vec            temp_vec;
549   PetscErrorCode ierr;
550 
551   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
552   if (v3 != v2) {
553     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
554     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
555   } else {
556     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
557     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
558     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
559     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
560     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
561   }
562   PetscFunctionReturn(0);
563 }
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "MatMultTranspose_IS"
567 PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
568 {
569   Mat_IS         *is = (Mat_IS*)A->data;
570   PetscErrorCode ierr;
571 
572   PetscFunctionBegin; /*  y = A' * x */
573   /*  scatter the global vector x into the local work vector */
574   ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
575   ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
576 
577   /* multiply the local matrix */
578   ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);
579 
580   /* scatter product back into global vector */
581   ierr = VecSet(y,0);CHKERRQ(ierr);
582   ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
583   ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
584   PetscFunctionReturn(0);
585 }
586 
587 #undef __FUNCT__
588 #define __FUNCT__ "MatMultTransposeAdd_IS"
589 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
590 {
591   Vec            temp_vec;
592   PetscErrorCode ierr;
593 
594   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
595   if (v3 != v2) {
596     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
597     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
598   } else {
599     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
600     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
601     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
602     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
603     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
604   }
605   PetscFunctionReturn(0);
606 }
607 
608 #undef __FUNCT__
609 #define __FUNCT__ "MatView_IS"
610 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
611 {
612   Mat_IS         *a = (Mat_IS*)A->data;
613   PetscErrorCode ierr;
614   PetscViewer    sviewer;
615 
616   PetscFunctionBegin;
617   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
618   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
619   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
620   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
621   PetscFunctionReturn(0);
622 }
623 
624 #undef __FUNCT__
625 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Installs the local-to-global mapping on a
   MATIS matrix and (re)builds everything derived from it: the sequential
   local matrix, the local work vectors, and the global-to-local scatter.
   MATIS requires identical row and column mappings.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  /* take a reference before the (no-op after the block above) destroy, then store */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A: square of size n (the number of local+ghost
     dofs touched by this process), BAIJ when the mapping carries a block size */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  /* local objects are tunable from the options database via the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter: local position i <- global position
     mapping[i]; used by MatMult/MatMultTranspose to gather/sum values */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
688 
689 #undef __FUNCT__
690 #define __FUNCT__ "MatSetValues_IS"
/*
   MatSetValues_IS - Inserts values given in GLOBAL numbering by translating
   the indices to local numbering and forwarding to the subdomain matrix.

   NOTE(review): ISG2LMapApply is a macro defined earlier in this file (it is
   #undef'ed just below this function); presumably it wraps
   ISGlobalToLocalMappingApply — confirm against the full file.
*/
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  /* fixed-size stack buffers: at most 2048 row/column indices per call */
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* NOTE(review): this bound is only enforced in debug builds; in optimized
     builds a larger m or n would overflow the stack buffers above */
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
706 
707 #undef ISG2LMapSetUp
708 #undef ISG2LMapApply
709 
710 #undef __FUNCT__
711 #define __FUNCT__ "MatSetValuesLocal_IS"
712 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
713 {
714   PetscErrorCode ierr;
715   Mat_IS         *is = (Mat_IS*)A->data;
716 
717   PetscFunctionBegin;
718   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
719   PetscFunctionReturn(0);
720 }
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
724 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
725 {
726   PetscErrorCode ierr;
727   Mat_IS         *is = (Mat_IS*)A->data;
728 
729   PetscFunctionBegin;
730   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "MatZeroRows_IS"
736 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
737 {
738   Mat_IS         *is = (Mat_IS*)A->data;
739   PetscInt       n_l = 0, *rows_l = NULL;
740   PetscErrorCode ierr;
741 
742   PetscFunctionBegin;
743   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
744   if (n) {
745     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
746     ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
747   }
748   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
749   ierr = PetscFree(rows_l);CHKERRQ(ierr);
750   PetscFunctionReturn(0);
751 }
752 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeros rows of the inner matrix, given in the local
   numbering, and sets their diagonal entries so that the globally assembled
   operator carries exactly 'diag' on those rows.

   Input Parameters:
+  A    - the MATIS matrix
.  n    - number of local rows to zero
.  rows - local indices of the rows to zero
.  diag - value the assembled (global) diagonal should take on those rows
-  x,b  - not supported; passing both raises an error
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector", needed so that MatMult_IS works
       properly on the interface nodes. After the reverse-ADD scatter
       followed by the forward-INSERT scatter, each entry of is->x holds
       the number of subdomains sharing that local dof.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* nothing zeroed on this process: the local problem stays pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows of the local matrix, then set each diagonal entry to
       diag divided by the dof's multiplicity (read from the counting
       vector), so that summing the contributions of all subdomains
       sharing the dof yields exactly diag in the assembled operator */
    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
796 
797 #undef __FUNCT__
798 #define __FUNCT__ "MatAssemblyBegin_IS"
799 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
800 {
801   Mat_IS         *is = (Mat_IS*)A->data;
802   PetscErrorCode ierr;
803 
804   PetscFunctionBegin;
805   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 #undef __FUNCT__
810 #define __FUNCT__ "MatAssemblyEnd_IS"
811 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
812 {
813   Mat_IS         *is = (Mat_IS*)A->data;
814   PetscErrorCode ierr;
815 
816   PetscFunctionBegin;
817   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
818   PetscFunctionReturn(0);
819 }
820 
821 #undef __FUNCT__
822 #define __FUNCT__ "MatISGetLocalMat_IS"
823 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
824 {
825   Mat_IS *is = (Mat_IS*)mat->data;
826 
827   PetscFunctionBegin;
828   *local = is->A;
829   PetscFunctionReturn(0);
830 }
831 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.
    No reference is taken on the returned matrix: do not destroy it, and do
  not use it after the MATIS matrix has been destroyed.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "MatISSetLocalMat_IS"
865 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
866 {
867   Mat_IS         *is = (Mat_IS*)mat->data;
868   PetscInt       nrows,ncols,orows,ocols;
869   PetscErrorCode ierr;
870 
871   PetscFunctionBegin;
872   if (is->A) {
873     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
874     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
875     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
876   }
877   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
878   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
879   is->A = local;
880   PetscFunctionReturn(0);
881 }
882 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.
    The MATIS matrix takes a reference on local; any previously stored
  local matrix is destroyed, and the new one must have the same sizes
  as the old one.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
912 
913 #undef __FUNCT__
914 #define __FUNCT__ "MatZeroEntries_IS"
915 PetscErrorCode MatZeroEntries_IS(Mat A)
916 {
917   Mat_IS         *a = (Mat_IS*)A->data;
918   PetscErrorCode ierr;
919 
920   PetscFunctionBegin;
921   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 #undef __FUNCT__
926 #define __FUNCT__ "MatScale_IS"
927 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
928 {
929   Mat_IS         *is = (Mat_IS*)A->data;
930   PetscErrorCode ierr;
931 
932   PetscFunctionBegin;
933   ierr = MatScale(is->A,a);CHKERRQ(ierr);
934   PetscFunctionReturn(0);
935 }
936 
937 #undef __FUNCT__
938 #define __FUNCT__ "MatGetDiagonal_IS"
939 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
940 {
941   Mat_IS         *is = (Mat_IS*)A->data;
942   PetscErrorCode ierr;
943 
944   PetscFunctionBegin;
945   /* get diagonal of the local matrix */
946   ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);
947 
948   /* scatter diagonal back into global vector */
949   ierr = VecSet(v,0);CHKERRQ(ierr);
950   ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
951   ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
952   PetscFunctionReturn(0);
953 }
954 
955 #undef __FUNCT__
956 #define __FUNCT__ "MatSetOption_IS"
957 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
958 {
959   Mat_IS         *a = (Mat_IS*)A->data;
960   PetscErrorCode ierr;
961 
962   PetscFunctionBegin;
963   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
964   PetscFunctionReturn(0);
965 }
966 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm - MPI communicator that will share the matrix
.     bs - local and global block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
-     map - mapping that defines the global number for each local number

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
          plus the ghost points to global indices.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* same mapping for rows and columns; this also creates the inner local matrix */
  ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1004 
1005 /*MC
1006    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
1007    This stores the matrices in globally unassembled form. Each processor
1008    assembles only its local Neumann problem and the parallel matrix vector
1009    product is handled "implicitly".
1010 
1011    Operations Provided:
1012 +  MatMult()
1013 .  MatMultAdd()
1014 .  MatMultTranspose()
1015 .  MatMultTransposeAdd()
1016 .  MatZeroEntries()
1017 .  MatSetOption()
1018 .  MatZeroRows()
1019 .  MatZeroRowsLocal()
1020 .  MatSetValues()
1021 .  MatSetValuesLocal()
1022 .  MatScale()
1023 .  MatGetDiagonal()
1024 -  MatSetLocalToGlobalMapping()
1025 
1026    Options Database Keys:
1027 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1028 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1030 
1031           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1032 
1033           You can do matrix preallocation on the local matrix after you obtain it with
1034           MatISGetLocalMat()
1035 
1036   Level: advanced
1037 
1038 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC
1039 
1040 M*/
1041 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor for the MATIS matrix type: allocates the
   type-specific data structure, installs the supported operations, zeros
   the cached objects (inner matrix, scatter context, work vectors, SF),
   and composes the MATIS-specific methods. The inner local matrix itself
   is created later, by MatSetLocalToGlobalMapping().
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  /* start from an empty function table: only the operations set below are supported */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);

  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);

  /* zeros pointer */
  b->A           = 0;
  b->ctx         = 0;
  b->x           = 0;
  b->y           = 0;
  b->mapping     = 0;
  b->sf          = 0;
  b->sf_rootdata = 0;
  b->sf_leafdata = 0;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1097