xref: /petsc/src/mat/impls/is/matis.c (revision b3afcdbeace4b2d63b0b5a1e28971cf80ff20f66)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "MatISSetUpSF"
19 /*@
20    MatISSetUpSF - Setup star forest object used by MatIS during MatISSetPreallocation.
21 
22    Collective on MPI_Comm
23 
24    Input Parameters:
25 +  A - the matrix
26 
27    Level: advanced
28 
   Notes: This function does not need to be called by the user.
30 
31 .keywords: matrix
32 
33 .seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat()
34 @*/
35 PetscErrorCode  MatISSetUpSF(Mat A)
36 {
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
41   PetscValidType(A,1);
42   ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr);
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "MatISSetUpSF_IS"
48 static PetscErrorCode MatISSetUpSF_IS(Mat B)
49 {
50   Mat_IS         *matis = (Mat_IS*)(B->data);
51   const PetscInt *gidxs;
52   PetscErrorCode ierr;
53 
54   PetscFunctionBegin;
55   if (matis->sf) {
56     PetscFunctionReturn(0);
57   }
58   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
59   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
60   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
61   ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
62   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
63   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
64   ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
65   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
66   PetscFunctionReturn(0);
67 }
68 
69 #undef __FUNCT__
70 #define __FUNCT__ "MatISSetPreallocation"
71 /*@
72    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
73 
74    Collective on MPI_Comm
75 
76    Input Parameters:
77 +  B - the matrix
78 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
79            (same value is used for all local rows)
80 .  d_nnz - array containing the number of nonzeros in the various rows of the
81            DIAGONAL portion of the local submatrix (possibly different for each row)
82            or NULL, if d_nz is used to specify the nonzero structure.
83            The size of this array is equal to the number of local rows, i.e 'm'.
84            For matrices that will be factored, you must leave room for (and set)
85            the diagonal entry even if it is zero.
86 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
87            submatrix (same value is used for all local rows).
88 -  o_nnz - array containing the number of nonzeros in the various rows of the
89            OFF-DIAGONAL portion of the local submatrix (possibly different for
90            each row) or NULL, if o_nz is used to specify the nonzero
91            structure. The size of this array is equal to the number
92            of local rows, i.e 'm'.
93 
94    If the *_nnz parameter is given then the *_nz parameter is ignored
95 
96    Level: intermediate
97 
98    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
100           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
101 
102 .keywords: matrix
103 
104 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
105 @*/
106 PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
107 {
108   PetscErrorCode ierr;
109 
110   PetscFunctionBegin;
111   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
112   PetscValidType(B,1);
113   ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "MatISSetPreallocation_IS"
/* Type-specific implementation of MatISSetPreallocation: translates per-row nonzero
   estimates given in the global (assembled) numbering into preallocations for the
   local subdomain matrix, for the AIJ, BAIJ and SBAIJ formats. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the local matrix matis->A is created by MatSetLocalToGlobalMapping */
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  ierr = MatISSetUpSF(B);CHKERRQ(ierr);
  /* total estimate (diagonal + off-diagonal parts) per locally owned row, stored at SF roots */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* broadcast the estimates from owners to all subdomains sharing those rows;
     the two Mat queries are overlapped with the communication */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row cannot have more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress scalar row estimates into block-row estimates (reuses leafdata in place) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): subtracting i looks like a rough correction for the lower-triangular
     part not stored by SBAIJ — confirm against the SBAIJ preallocation semantics */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
158 
159 #undef __FUNCT__
160 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
161 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
162 {
163   Mat_IS          *matis = (Mat_IS*)(A->data);
164   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
165   const PetscInt  *global_indices_r,*global_indices_c;
166   PetscInt        i,j,bs,rows,cols;
167   PetscInt        lrows,lcols;
168   PetscInt        local_rows,local_cols;
169   PetscMPIInt     nsubdomains;
170   PetscBool       isdense,issbaij;
171   PetscErrorCode  ierr;
172 
173   PetscFunctionBegin;
174   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
175   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
176   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
177   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
178   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
179   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
180   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
181   if (A->rmap->mapping != A->cmap->mapping) {
182     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
183   } else {
184     global_indices_c = global_indices_r;
185   }
186 
187   if (issbaij) {
188     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
189   }
190   /*
191      An SF reduce is needed to sum up properly on shared rows.
192      Note that generally preallocation is not exact, since it overestimates nonzeros
193   */
194   ierr = MatISSetUpSF(A);CHKERRQ(ierr);
195   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
196   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
197   /* All processes need to compute entire row ownership */
198   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
199   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
200   for (i=0;i<nsubdomains;i++) {
201     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
202       row_ownership[j] = i;
203     }
204   }
205 
206   /*
207      my_dnz and my_onz contains exact contribution to preallocation from each local mat
208      then, they will be summed up properly. This way, preallocation is always sufficient
209   */
210   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
211   /* preallocation as a MATAIJ */
212   if (isdense) { /* special case for dense local matrices */
213     for (i=0;i<local_rows;i++) {
214       PetscInt index_row = global_indices_r[i];
215       for (j=i;j<local_rows;j++) {
216         PetscInt owner = row_ownership[index_row];
217         PetscInt index_col = global_indices_c[j];
218         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
219           my_dnz[i] += 1;
220         } else { /* offdiag block */
221           my_onz[i] += 1;
222         }
223         /* same as before, interchanging rows and cols */
224         if (i != j) {
225           owner = row_ownership[index_col];
226           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
227             my_dnz[j] += 1;
228           } else {
229             my_onz[j] += 1;
230           }
231         }
232       }
233     }
234   } else { /* TODO: this could be optimized using MatGetRowIJ */
235     for (i=0;i<local_rows;i++) {
236       const PetscInt *cols;
237       PetscInt       ncols,index_row = global_indices_r[i];
238       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
239       for (j=0;j<ncols;j++) {
240         PetscInt owner = row_ownership[index_row];
241         PetscInt index_col = global_indices_c[cols[j]];
242         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
243           my_dnz[i] += 1;
244         } else { /* offdiag block */
245           my_onz[i] += 1;
246         }
247         /* same as before, interchanging rows and cols */
248         if (issbaij && index_col != index_row) {
249           owner = row_ownership[index_col];
250           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
251             my_dnz[cols[j]] += 1;
252           } else {
253             my_onz[cols[j]] += 1;
254           }
255         }
256       }
257       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
258     }
259   }
260   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
261   if (global_indices_c != global_indices_r) {
262     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
263   }
264   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
265 
266   /* Reduce my_dnz and my_onz */
267   if (maxreduce) {
268     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
269     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
270     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
271     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
272   } else {
273     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
274     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
275     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
276     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
277   }
278   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
279 
280   /* Resize preallocation if overestimated */
281   for (i=0;i<lrows;i++) {
282     dnz[i] = PetscMin(dnz[i],lcols);
283     onz[i] = PetscMin(onz[i],cols-lcols);
284   }
285   /* set preallocation */
286   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
287   for (i=0;i<lrows/bs;i++) {
288     dnz[i] = dnz[i*bs]/bs;
289     onz[i] = onz[i*bs]/bs;
290   }
291   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
292   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
293   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
294   if (issbaij) {
295     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
296   }
297   PetscFunctionReturn(0);
298 }
299 
300 #undef __FUNCT__
301 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Type-specific implementation of MatISGetMPIXAIJ: assembles the unassembled MATIS
   matrix into a parallel AIJ/BAIJ/SBAIJ matrix by adding the entries of every local
   subdomain matrix into *M via the local-to-global mapping. */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* uniprocessor case: the local matrix already is the assembled matrix */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every process has an SBAIJ local matrix */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must have sizes and block size matching mat */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* convert to BAIJ so that full rows (not just the upper triangle) are available below */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    /* take a reference so the MatDestroy at the end is balanced in both branches */
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices: insert the whole array at once */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      /* square case: share one identity index array for rows and cols */
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense arrays are column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* fast path: use the CSR arrays of the local AIJ matrix directly */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
447 
448 #undef __FUNCT__
449 #define __FUNCT__ "MatISGetMPIXAIJ"
450 /*@
451     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
452 
  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
456 
457   Output Parameter:
458 .  newmat - the matrix in AIJ format
459 
460   Level: developer
461 
462   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
463 
464 .seealso: MATIS
465 @*/
466 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
467 {
468   PetscErrorCode ierr;
469 
470   PetscFunctionBegin;
471   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
472   PetscValidLogicalCollectiveEnum(mat,reuse,2);
473   PetscValidPointer(newmat,3);
474   if (reuse != MAT_INITIAL_MATRIX) {
475     PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
476     PetscCheckSameComm(mat,1,*newmat,3);
477     if (mat == *newmat) {
478       SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
479     }
480   }
481   ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
482   PetscFunctionReturn(0);
483 }
484 
485 #undef __FUNCT__
486 #define __FUNCT__ "MatDuplicate_IS"
487 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
488 {
489   PetscErrorCode ierr;
490   Mat_IS         *matis = (Mat_IS*)(mat->data);
491   PetscInt       bs,m,n,M,N;
492   Mat            B,localmat;
493 
494   PetscFunctionBegin;
495   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
496   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
497   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
498   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
499   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
500   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
501   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
502   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
503   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
504   *newmat = B;
505   PetscFunctionReturn(0);
506 }
507 
508 #undef __FUNCT__
509 #define __FUNCT__ "MatIsHermitian_IS"
510 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
511 {
512   PetscErrorCode ierr;
513   Mat_IS         *matis = (Mat_IS*)A->data;
514   PetscBool      local_sym;
515 
516   PetscFunctionBegin;
517   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
518   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
519   PetscFunctionReturn(0);
520 }
521 
522 #undef __FUNCT__
523 #define __FUNCT__ "MatIsSymmetric_IS"
524 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
525 {
526   PetscErrorCode ierr;
527   Mat_IS         *matis = (Mat_IS*)A->data;
528   PetscBool      local_sym;
529 
530   PetscFunctionBegin;
531   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
532   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
533   PetscFunctionReturn(0);
534 }
535 
536 #undef __FUNCT__
537 #define __FUNCT__ "MatDestroy_IS"
538 PetscErrorCode MatDestroy_IS(Mat A)
539 {
540   PetscErrorCode ierr;
541   Mat_IS         *b = (Mat_IS*)A->data;
542 
543   PetscFunctionBegin;
544   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
545   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
546   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
547   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
548   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
549   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
550   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
551   ierr = PetscFree(A->data);CHKERRQ(ierr);
552   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
553   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
554   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
555   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
556   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
557   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr);
558   PetscFunctionReturn(0);
559 }
560 
561 #undef __FUNCT__
562 #define __FUNCT__ "MatMult_IS"
563 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
564 {
565   PetscErrorCode ierr;
566   Mat_IS         *is  = (Mat_IS*)A->data;
567   PetscScalar    zero = 0.0;
568 
569   PetscFunctionBegin;
570   /*  scatter the global vector x into the local work vector */
571   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
572   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
573 
574   /* multiply the local matrix */
575   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
576 
577   /* scatter product back into global memory */
578   ierr = VecSet(y,zero);CHKERRQ(ierr);
579   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
580   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
581   PetscFunctionReturn(0);
582 }
583 
584 #undef __FUNCT__
585 #define __FUNCT__ "MatMultAdd_IS"
586 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
587 {
588   Vec            temp_vec;
589   PetscErrorCode ierr;
590 
591   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
592   if (v3 != v2) {
593     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
594     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
595   } else {
596     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
597     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
598     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
599     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
600     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
601   }
602   PetscFunctionReturn(0);
603 }
604 
605 #undef __FUNCT__
606 #define __FUNCT__ "MatMultTranspose_IS"
607 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
608 {
609   Mat_IS         *is = (Mat_IS*)A->data;
610   PetscErrorCode ierr;
611 
612   PetscFunctionBegin;
613   /*  scatter the global vector x into the local work vector */
614   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
615   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
616 
617   /* multiply the local matrix */
618   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
619 
620   /* scatter product back into global vector */
621   ierr = VecSet(x,0);CHKERRQ(ierr);
622   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
623   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
624   PetscFunctionReturn(0);
625 }
626 
627 #undef __FUNCT__
628 #define __FUNCT__ "MatMultTransposeAdd_IS"
629 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
630 {
631   Vec            temp_vec;
632   PetscErrorCode ierr;
633 
634   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
635   if (v3 != v2) {
636     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
637     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
638   } else {
639     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
640     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
641     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
642     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
643     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
644   }
645   PetscFunctionReturn(0);
646 }
647 
648 #undef __FUNCT__
649 #define __FUNCT__ "MatView_IS"
650 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
651 {
652   Mat_IS         *a = (Mat_IS*)A->data;
653   PetscErrorCode ierr;
654   PetscViewer    sviewer;
655 
656   PetscFunctionBegin;
657   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
658   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
659   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
660   PetscFunctionReturn(0);
661 }
662 
663 #undef __FUNCT__
664 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Sets the row/column local-to-global mappings of a MATIS matrix and (re)builds
   all the derived data: the sequential local matrix, the local work vectors, and
   the global-to-local scatters used by the matrix-vector products. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential matrix sized by the mappings,
     configurable via the "is_" options prefix */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local entry i corresponds to global
     entry rmapping(i) (resp. cmapping(i)) */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same mapping: share the row scatter (reference counted) */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
728 
729 #undef __FUNCT__
730 #define __FUNCT__ "MatSetValues_IS"
731 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
732 {
733   Mat_IS         *is = (Mat_IS*)mat->data;
734   PetscInt       rows_l[2048],cols_l[2048];
735   PetscErrorCode ierr;
736 
737   PetscFunctionBegin;
738 #if defined(PETSC_USE_DEBUG)
739   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
740 #endif
741   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
742   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
743   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
744   PetscFunctionReturn(0);
745 }
746 
747 #undef __FUNCT__
748 #define __FUNCT__ "MatSetValuesLocal_IS"
749 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
750 {
751   PetscErrorCode ierr;
752   Mat_IS         *is = (Mat_IS*)A->data;
753 
754   PetscFunctionBegin;
755   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
756   PetscFunctionReturn(0);
757 }
758 
759 #undef __FUNCT__
760 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
761 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
762 {
763   PetscErrorCode ierr;
764   Mat_IS         *is = (Mat_IS*)A->data;
765 
766   PetscFunctionBegin;
767   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
768   PetscFunctionReturn(0);
769 }
770 
771 #undef __FUNCT__
772 #define __FUNCT__ "MatZeroRows_IS"
773 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
774 {
775   PetscInt       n_l = 0, *rows_l = NULL;
776   PetscErrorCode ierr;
777 
778   PetscFunctionBegin;
779   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
780   if (n) {
781     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
782     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
783   }
784   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
785   ierr = PetscFree(rows_l);CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeroes rows of the local matrix, given by LOCAL indices.

   After zeroing, each zeroed row gets diag on its diagonal, scaled by the
   number of processes sharing that row so that the implicitly-assembled
   global matrix ends up with exactly diag there (see the counting vector
   below).  The x/b "fix the RHS" variant is not supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector": after the reverse ADD scatter
       followed by the forward INSERT scatter, is->y[i] holds the number of
       processes owning local dof i (>= 1 for every local dof), so it can be
       used below to split diag evenly among the sharing processes.
       This is in order to MatMult_IS work properly in the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* no rows to zero on this process: flag the local problem as pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the requested local rows, then set diag/(sharing count) on each
       diagonal so contributions from all sharers sum to diag globally */
    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
831 
832 #undef __FUNCT__
833 #define __FUNCT__ "MatAssemblyBegin_IS"
834 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
835 {
836   Mat_IS         *is = (Mat_IS*)A->data;
837   PetscErrorCode ierr;
838 
839   PetscFunctionBegin;
840   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
841   PetscFunctionReturn(0);
842 }
843 
844 #undef __FUNCT__
845 #define __FUNCT__ "MatAssemblyEnd_IS"
846 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
847 {
848   Mat_IS         *is = (Mat_IS*)A->data;
849   PetscErrorCode ierr;
850 
851   PetscFunctionBegin;
852   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "MatISGetLocalMat_IS"
858 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
859 {
860   Mat_IS *is = (Mat_IS*)mat->data;
861 
862   PetscFunctionBegin;
863   *local = is->A;
864   PetscFunctionReturn(0);
865 }
866 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The reference count of the returned matrix is not incremented: do not
  destroy it, it is owned by the MATIS object.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the type-specific implementation; errors if not composed */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
897 
898 #undef __FUNCT__
899 #define __FUNCT__ "MatISSetLocalMat_IS"
900 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
901 {
902   Mat_IS         *is = (Mat_IS*)mat->data;
903   PetscInt       nrows,ncols,orows,ocols;
904   PetscErrorCode ierr;
905 
906   PetscFunctionBegin;
907   if (is->A) {
908     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
909     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
910     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
911   }
912   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
913   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
914   is->A = local;
915   PetscFunctionReturn(0);
916 }
917 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The MATIS object takes its own reference on local; the caller keeps
  ownership of its reference and may destroy it.  If a local matrix is
  already set, the new one must have the same sizes.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the type-specific implementation; errors if not composed */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
947 
948 #undef __FUNCT__
949 #define __FUNCT__ "MatZeroEntries_IS"
950 PetscErrorCode MatZeroEntries_IS(Mat A)
951 {
952   Mat_IS         *a = (Mat_IS*)A->data;
953   PetscErrorCode ierr;
954 
955   PetscFunctionBegin;
956   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
960 #undef __FUNCT__
961 #define __FUNCT__ "MatScale_IS"
962 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
963 {
964   Mat_IS         *is = (Mat_IS*)A->data;
965   PetscErrorCode ierr;
966 
967   PetscFunctionBegin;
968   ierr = MatScale(is->A,a);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "MatGetDiagonal_IS"
974 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
975 {
976   Mat_IS         *is = (Mat_IS*)A->data;
977   PetscErrorCode ierr;
978 
979   PetscFunctionBegin;
980   /* get diagonal of the local matrix */
981   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
982 
983   /* scatter diagonal back into global vector */
984   ierr = VecSet(v,0);CHKERRQ(ierr);
985   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
986   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
987   PetscFunctionReturn(0);
988 }
989 
990 #undef __FUNCT__
991 #define __FUNCT__ "MatSetOption_IS"
992 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
993 {
994   Mat_IS         *a = (Mat_IS*)A->data;
995   PetscErrorCode ierr;
996 
997   PetscFunctionBegin;
998   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
999   PetscFunctionReturn(0);
1000 }
1001 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* when only one mapping is given, reuse it for both rows and columns
     (square-matrix assumption documented above) */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1047 
1048 /*MC
1049    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1050    This stores the matrices in globally unassembled form. Each processor
1051    assembles only its local Neumann problem and the parallel matrix vector
1052    product is handled "implicitly".
1053 
1054    Operations Provided:
1055 +  MatMult()
1056 .  MatMultAdd()
1057 .  MatMultTranspose()
1058 .  MatMultTransposeAdd()
1059 .  MatZeroEntries()
1060 .  MatSetOption()
1061 .  MatZeroRows()
1062 .  MatZeroRowsLocal()
1063 .  MatSetValues()
1064 .  MatSetValuesLocal()
1065 .  MatScale()
1066 .  MatGetDiagonal()
1067 -  MatSetLocalToGlobalMapping()
1068 
1069    Options Database Keys:
1070 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1071 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1073 
1074           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1075 
1076           You can do matrix preallocation on the local matrix after you obtain it with
1077           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1078 
1079   Level: advanced
1080 
1081 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1082 
1083 M*/
1084 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor registered for the MATIS matrix type.

   Allocates the MATIS private data, installs the type-specific function
   table, and composes the MATIS-specific query/set functions so the public
   wrappers (MatISGetLocalMat(), MatISSetLocalMat(), ...) can dispatch here.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  /* allocate (and log) the MATIS private data structure */
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: clear the whole table first, then fill in what MATIS supports */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions: composed so the public wrappers can reach the
     type-specific implementations via PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1129