xref: /petsc/src/mat/impls/is/matis.c (revision c20ebc76411a944c5e054aaadfeb4c47706271ee)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 #include <petsc/private/sfimpl.h>
17 
#undef __FUNCT__
#define __FUNCT__ "MatISSetUpSF"
/*@
   MatISSetUpSF - Setup star forest object used by MatIS during MatISSetPreallocation.

   Collective on MPI_Comm

   Input Parameters:
+  A - the matrix

   Level: advanced

   Notes: This function does not need to be called by the user.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetUpSF(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* Dispatch to the type-specific implementation if composed on the object;
     a no-op for matrix types that do not provide MatISSetUpSF_C */
  ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
46 
#undef __FUNCT__
#define __FUNCT__ "MatISSetUpSF_IS"
/* Build the star forest connecting globally owned rows (roots) to the rows of
   the local subdomain matrix (leaves), together with the two scratch buffers
   used to exchange integer preallocation data along it.
   Idempotent: returns immediately if the SF already exists. */
static PetscErrorCode MatISSetUpSF_IS(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;   /* global indices of the row local-to-global map */
  PetscInt       nleaves;  /* number of local (leaf) row indices */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (matis->sf) PetscFunctionReturn(0);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(B->rmap->mapping,&nleaves);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* one buffer entry per owned row (roots) and one per local row (leaves) */
  ierr = PetscMalloc2(matis->sf->nroots,&matis->sf_rootdata,matis->sf->nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
67 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* Dispatch to the type-specific implementation (MatISSetPreallocation_IS) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
115 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* MPIAIJ-style preallocation for a MATIS matrix.

   Each process fills sf_rootdata with the requested per-row nonzero count
   (diagonal + off-diagonal) for the global rows it owns, then broadcasts
   those counts through the star forest so every subdomain obtains an
   (over)estimate for each of its local rows.  The leaf buffer is then reused
   in place to derive preallocations for the AIJ, BAIJ and SBAIJ flavours of
   the local matrix; only the call matching the actual type of matis->A has
   an effect. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  ierr = MatISSetUpSF(B);CHKERRQ(ierr);

  /* total per-owned-row estimate: d_* plus o_* (arrays override scalars) */
  if (!d_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nnz[i];

  if (!o_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nnz[i];

  /* overlap the SF broadcast with local queries on the subdomain matrix */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);

  /* AIJ: a row cannot have more nonzeros than the local number of columns */
  for (i=0;i<matis->sf->nleaves;i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* BAIJ: use the count of the first row of each block, expressed in block units */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* SBAIJ: only the upper triangle is stored, so subtract from the BAIJ
     estimate the i block columns preceding the diagonal of block row i */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
152 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
153 {
154   Mat_IS          *matis = (Mat_IS*)(A->data);
155   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
156   const PetscInt  *global_indices_r,*global_indices_c;
157   PetscInt        i,j,bs,rows,cols;
158   PetscInt        lrows,lcols;
159   PetscInt        local_rows,local_cols;
160   PetscMPIInt     nsubdomains;
161   PetscBool       isdense,issbaij;
162   PetscErrorCode  ierr;
163 
164   PetscFunctionBegin;
165   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
166   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
167   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
168   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
169   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
170   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
171   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
172   if (A->rmap->mapping != A->cmap->mapping) {
173     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
174   } else {
175     global_indices_c = global_indices_r;
176   }
177 
178   if (issbaij) {
179     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
180   }
181   /*
182      An SF reduce is needed to sum up properly on shared rows.
183      Note that generally preallocation is not exact, since it overestimates nonzeros
184   */
185   ierr = MatISSetUpSF(A);CHKERRQ(ierr);
186   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
187   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
188   /* All processes need to compute entire row ownership */
189   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
190   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
191   for (i=0;i<nsubdomains;i++) {
192     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
193       row_ownership[j] = i;
194     }
195   }
196 
197   /*
198      my_dnz and my_onz contains exact contribution to preallocation from each local mat
199      then, they will be summed up properly. This way, preallocation is always sufficient
200   */
201   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
202   /* preallocation as a MATAIJ */
203   if (isdense) { /* special case for dense local matrices */
204     for (i=0;i<local_rows;i++) {
205       PetscInt index_row = global_indices_r[i];
206       for (j=i;j<local_rows;j++) {
207         PetscInt owner = row_ownership[index_row];
208         PetscInt index_col = global_indices_c[j];
209         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
210           my_dnz[i] += 1;
211         } else { /* offdiag block */
212           my_onz[i] += 1;
213         }
214         /* same as before, interchanging rows and cols */
215         if (i != j) {
216           owner = row_ownership[index_col];
217           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
218             my_dnz[j] += 1;
219           } else {
220             my_onz[j] += 1;
221           }
222         }
223       }
224     }
225   } else { /* TODO: this could be optimized using MatGetRowIJ */
226     for (i=0;i<local_rows;i++) {
227       const PetscInt *cols;
228       PetscInt       ncols,index_row = global_indices_r[i];
229       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
230       for (j=0;j<ncols;j++) {
231         PetscInt owner = row_ownership[index_row];
232         PetscInt index_col = global_indices_c[cols[j]];
233         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
234           my_dnz[i] += 1;
235         } else { /* offdiag block */
236           my_onz[i] += 1;
237         }
238         /* same as before, interchanging rows and cols */
239         if (issbaij && index_col != index_row) {
240           owner = row_ownership[index_col];
241           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
242             my_dnz[cols[j]] += 1;
243           } else {
244             my_onz[cols[j]] += 1;
245           }
246         }
247       }
248       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
249     }
250   }
251   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
252   if (global_indices_c != global_indices_r) {
253     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
254   }
255   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
256 
257   /* Reduce my_dnz and my_onz */
258   if (maxreduce) {
259     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
260     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
261     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
262     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
263   } else {
264     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
265     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
266     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
267     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
268   }
269   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
270 
271   /* Resize preallocation if overestimated */
272   for (i=0;i<lrows;i++) {
273     dnz[i] = PetscMin(dnz[i],lcols);
274     onz[i] = PetscMin(onz[i],cols-lcols);
275   }
276   /* set preallocation */
277   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
278   for (i=0;i<lrows/bs;i++) {
279     dnz[i] = dnz[i*bs]/bs;
280     onz[i] = onz[i*bs]/bs;
281   }
282   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
283   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
284   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
285   if (issbaij) {
286     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
287   }
288   PetscFunctionReturn(0);
289 }
290 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assemble the unassembled MATIS matrix 'mat' into a global matrix *M
   (AIJ/BAIJ/SBAIJ, chosen from the local matrix types and block size).
   With MAT_INITIAL_MATRIX a new matrix is created and preallocated; with
   reuse, *M is checked for compatible sizes/block size and zeroed.
   Values are inserted with ADD_VALUES so shared interface entries sum up. */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor: the local matrix already is the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if ALL subdomains are SBAIJ */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat's layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle; convert to BAIJ to insert full rows */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays 0..local_rows-1 / 0..local_cols-1; share one
       allocation when the local matrix is square */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense arrays are column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
424 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* reusing: *newmat must be a valid matrix distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* PetscUseMethod (not Try): errors if mat is not of type MATIS */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
459 
460 #undef __FUNCT__
461 #define __FUNCT__ "MatDuplicate_IS"
462 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
463 {
464   PetscErrorCode ierr;
465   Mat_IS         *matis = (Mat_IS*)(mat->data);
466   PetscInt       bs,m,n,M,N;
467   Mat            B,localmat;
468 
469   PetscFunctionBegin;
470   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
471   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
472   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
473   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
474   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
475   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
476   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
477   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
478   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
479   *newmat = B;
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "MatIsHermitian_IS"
485 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
486 {
487   PetscErrorCode ierr;
488   Mat_IS         *matis = (Mat_IS*)A->data;
489   PetscBool      local_sym;
490 
491   PetscFunctionBegin;
492   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
493   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "MatIsSymmetric_IS"
499 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
500 {
501   PetscErrorCode ierr;
502   Mat_IS         *matis = (Mat_IS*)A->data;
503   PetscBool      local_sym;
504 
505   PetscFunctionBegin;
506   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
507   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "MatDestroy_IS"
513 PetscErrorCode MatDestroy_IS(Mat A)
514 {
515   PetscErrorCode ierr;
516   Mat_IS         *b = (Mat_IS*)A->data;
517 
518   PetscFunctionBegin;
519   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
520   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
521   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
522   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
523   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
524   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
525   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
526   ierr = PetscFree(A->data);CHKERRQ(ierr);
527   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
528   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
529   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
530   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
531   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
532   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr);
533   PetscFunctionReturn(0);
534 }
535 
536 #undef __FUNCT__
537 #define __FUNCT__ "MatMult_IS"
538 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
539 {
540   PetscErrorCode ierr;
541   Mat_IS         *is  = (Mat_IS*)A->data;
542   PetscScalar    zero = 0.0;
543 
544   PetscFunctionBegin;
545   /*  scatter the global vector x into the local work vector */
546   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
547   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
548 
549   /* multiply the local matrix */
550   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
551 
552   /* scatter product back into global memory */
553   ierr = VecSet(y,zero);CHKERRQ(ierr);
554   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
555   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
556   PetscFunctionReturn(0);
557 }
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "MatMultAdd_IS"
561 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
562 {
563   Vec            temp_vec;
564   PetscErrorCode ierr;
565 
566   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
567   if (v3 != v2) {
568     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
569     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
570   } else {
571     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
572     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
573     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
574     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
575     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
576   }
577   PetscFunctionReturn(0);
578 }
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "MatMultTranspose_IS"
582 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
583 {
584   Mat_IS         *is = (Mat_IS*)A->data;
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   /*  scatter the global vector x into the local work vector */
589   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
590   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
591 
592   /* multiply the local matrix */
593   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
594 
595   /* scatter product back into global vector */
596   ierr = VecSet(x,0);CHKERRQ(ierr);
597   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
598   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
599   PetscFunctionReturn(0);
600 }
601 
602 #undef __FUNCT__
603 #define __FUNCT__ "MatMultTransposeAdd_IS"
604 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
605 {
606   Vec            temp_vec;
607   PetscErrorCode ierr;
608 
609   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
610   if (v3 != v2) {
611     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
612     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
613   } else {
614     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
615     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
616     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
617     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
618     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
619   }
620   PetscFunctionReturn(0);
621 }
622 
623 #undef __FUNCT__
624 #define __FUNCT__ "MatView_IS"
625 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
626 {
627   Mat_IS         *a = (Mat_IS*)A->data;
628   PetscErrorCode ierr;
629   PetscViewer    sviewer;
630   PetscBool      isascii,view = PETSC_TRUE;
631 
632   PetscFunctionBegin;
633   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
634   if (isascii)  {
635     PetscViewerFormat format;
636 
637     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
638     if (format == PETSC_VIEWER_ASCII_INFO) view = PETSC_FALSE;
639   }
640   if (!view) PetscFunctionReturn(0);
641   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
642   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
643   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
644   PetscFunctionReturn(0);
645 }
646 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* (Re)initialize a MATIS matrix from its row/column local-to-global mappings:
   destroys any previous local data, creates the local (sequential) matrix
   sized by the mappings, the local work vectors, and the global-to-local
   scatters used by MatMult/MatMultTranspose. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;  /* local sizes and block sizes of the two mappings */
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A (square in the local numbering nr x nc) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* local matrix options are controlled with the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    /* distinct column mapping: build a second scatter for the columns */
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same mapping: share the row scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
712 
713 #undef __FUNCT__
714 #define __FUNCT__ "MatSetValues_IS"
715 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
716 {
717   Mat_IS         *is = (Mat_IS*)mat->data;
718   PetscInt       rows_l[2048],cols_l[2048];
719   PetscErrorCode ierr;
720 
721   PetscFunctionBegin;
722 #if defined(PETSC_USE_DEBUG)
723   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
724 #endif
725   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
726   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
727   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
728   PetscFunctionReturn(0);
729 }
730 
731 #undef __FUNCT__
732 #define __FUNCT__ "MatSetValuesLocal_IS"
733 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
734 {
735   PetscErrorCode ierr;
736   Mat_IS         *is = (Mat_IS*)A->data;
737 
738   PetscFunctionBegin;
739   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
740   PetscFunctionReturn(0);
741 }
742 
743 #undef __FUNCT__
744 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
745 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
746 {
747   PetscErrorCode ierr;
748   Mat_IS         *is = (Mat_IS*)A->data;
749 
750   PetscFunctionBegin;
751   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
752   PetscFunctionReturn(0);
753 }
754 
755 #undef __FUNCT__
756 #define __FUNCT__ "MatZeroRows_IS"
757 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
758 {
759   PetscInt       n_l = 0, *rows_l = NULL;
760   PetscErrorCode ierr;
761 
762   PetscFunctionBegin;
763   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
764   if (n) {
765     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
766     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
767   }
768   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
769   ierr = PetscFree(rows_l);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 #undef __FUNCT__
774 #define __FUNCT__ "MatZeroRowsLocal_IS"
775 PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
776 {
777   Mat_IS         *is = (Mat_IS*)A->data;
778   PetscErrorCode ierr;
779   PetscInt       i;
780   PetscScalar    *array;
781 
782   PetscFunctionBegin;
783   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
784   {
785     /*
786        Set up is->x as a "counting vector". This is in order to MatMult_IS
787        work properly in the interface nodes.
788     */
789     Vec counter;
790     ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
791     ierr = VecSet(counter,0.);CHKERRQ(ierr);
792     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
793     ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
794     ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
795     ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
796     ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
797     ierr = VecDestroy(&counter);CHKERRQ(ierr);
798   }
799   if (!n) {
800     is->pure_neumann = PETSC_TRUE;
801   } else {
802     is->pure_neumann = PETSC_FALSE;
803 
804     ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
805     ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
806     for (i=0; i<n; i++) {
807       ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
808     }
809     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
810     ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
811     ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
812   }
813   PetscFunctionReturn(0);
814 }
815 
816 #undef __FUNCT__
817 #define __FUNCT__ "MatAssemblyBegin_IS"
818 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
819 {
820   Mat_IS         *is = (Mat_IS*)A->data;
821   PetscErrorCode ierr;
822 
823   PetscFunctionBegin;
824   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "MatAssemblyEnd_IS"
830 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
831 {
832   Mat_IS         *is = (Mat_IS*)A->data;
833   PetscErrorCode ierr;
834 
835   PetscFunctionBegin;
836   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "MatISGetLocalMat_IS"
842 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
843 {
844   Mat_IS *is = (Mat_IS*)mat->data;
845 
846   PetscFunctionBegin;
847   *local = is->A;
848   PetscFunctionReturn(0);
849 }
850 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    No reference is taken on the returned matrix: do not destroy it, it is
  owned by the MATIS object.

.seealso: MATIS, MatISSetLocalMat()
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the type-specific implementation (MatISGetLocalMat_IS);
     PetscUseMethod raises an error if mat is not of type MATIS */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
881 
882 #undef __FUNCT__
883 #define __FUNCT__ "MatISSetLocalMat_IS"
884 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
885 {
886   Mat_IS         *is = (Mat_IS*)mat->data;
887   PetscInt       nrows,ncols,orows,ocols;
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   if (is->A) {
892     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
893     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
894     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
895   }
896   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
897   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
898   is->A = local;
899   PetscFunctionReturn(0);
900 }
901 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

    The new local matrix must have the same sizes as the one it replaces
  (this is checked by the implementation).  The MATIS object takes its own
  reference on local, so the caller keeps ownership of its reference.

.seealso: MATIS, MatISGetLocalMat()
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the type-specific implementation (MatISSetLocalMat_IS) */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
931 
932 #undef __FUNCT__
933 #define __FUNCT__ "MatZeroEntries_IS"
934 PetscErrorCode MatZeroEntries_IS(Mat A)
935 {
936   Mat_IS         *a = (Mat_IS*)A->data;
937   PetscErrorCode ierr;
938 
939   PetscFunctionBegin;
940   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
941   PetscFunctionReturn(0);
942 }
943 
944 #undef __FUNCT__
945 #define __FUNCT__ "MatScale_IS"
946 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
947 {
948   Mat_IS         *is = (Mat_IS*)A->data;
949   PetscErrorCode ierr;
950 
951   PetscFunctionBegin;
952   ierr = MatScale(is->A,a);CHKERRQ(ierr);
953   PetscFunctionReturn(0);
954 }
955 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/*
   Diagonal of the implicitly assembled operator: the global matrix is the sum
   of the unassembled local contributions, so the local diagonals are
   accumulated into the global vector with a reverse ADD_VALUES scatter
   (entries of dofs shared by more than one subdomain get summed).
*/
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector; v must be zeroed first since
     the reverse scatter adds the local contributions */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
973 
974 #undef __FUNCT__
975 #define __FUNCT__ "MatSetOption_IS"
976 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
977 {
978   Mat_IS         *a = (Mat_IS*)A->data;
979   PetscErrorCode ierr;
980 
981   PetscFunctionBegin;
982   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
983   PetscFunctionReturn(0);
984 }
985 
#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* when only one mapping is provided it is used for both rows and columns
     (the matrix is assumed square, see Notes above) */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1031 
1032 /*MC
1033    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1034    This stores the matrices in globally unassembled form. Each processor
1035    assembles only its local Neumann problem and the parallel matrix vector
1036    product is handled "implicitly".
1037 
1038    Operations Provided:
1039 +  MatMult()
1040 .  MatMultAdd()
1041 .  MatMultTranspose()
1042 .  MatMultTransposeAdd()
1043 .  MatZeroEntries()
1044 .  MatSetOption()
1045 .  MatZeroRows()
1046 .  MatZeroRowsLocal()
1047 .  MatSetValues()
1048 .  MatSetValuesLocal()
1049 .  MatScale()
1050 .  MatGetDiagonal()
1051 -  MatSetLocalToGlobalMapping()
1052 
1053    Options Database Keys:
1054 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1055 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1057 
1058           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1059 
1060           You can do matrix preallocation on the local matrix after you obtain it with
1061           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1062 
1063   Level: advanced
1064 
1065 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1066 
1067 M*/
1068 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MATIS constructor: allocates the type-private data, installs the
   type-specific operations table and composes the MATIS-only functions
   (dispatched through PetscUseMethod/PetscTryMethod by the public API).
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);  /* type-private data, zero-initialized */
  A->data = (void*)b;

  /* matrix ops: clear the whole table first so unimplemented operations
     error out cleanly instead of inheriting stale pointers */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1113