xref: /petsc/src/mat/impls/is/matis.c (revision 4f2d7caf62749777129f8e1f39d8a03cd073aa5c)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 #include <petsc/private/sfimpl.h>
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "MatISSetUpSF"
20 /*@
21    MatISSetUpSF - Setup star forest object used by MatIS during MatISSetPreallocation.
22 
23    Collective on MPI_Comm
24 
25    Input Parameter:
26 .  A - the matrix
27 
28    Level: advanced
29 
30    Notes: This function does not need to be called by the user.
31 
32 .keywords: matrix
33 
34 .seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat()
35 @*/
PetscErrorCode  MatISSetUpSF(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* Dispatch to the implementation composed as "MatISSetUpSF_C" (MatISSetUpSF_IS
     for MATIS); PetscTryMethod is a no-op for types that do not compose it */
  ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "MatISSetUpSF_IS"
49 static PetscErrorCode MatISSetUpSF_IS(Mat B)
50 {
51   Mat_IS         *matis = (Mat_IS*)(B->data);
52   const PetscInt *gidxs;
53   PetscInt       nleaves;
54   PetscErrorCode ierr;
55 
56   PetscFunctionBegin;
57   if (matis->sf) PetscFunctionReturn(0);
58   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
59   ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
60   ierr = ISLocalToGlobalMappingGetSize(B->rmap->mapping,&nleaves);CHKERRQ(ierr);
61   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
62   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
63   ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
64   ierr = PetscMalloc2(matis->sf->nroots,&matis->sf_rootdata,matis->sf->nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
65   PetscFunctionReturn(0);
66 }
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "MatISSetPreallocation"
70 /*@
71    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
72 
73    Collective on MPI_Comm
74 
75    Input Parameters:
76 +  B - the matrix
77 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
78            (same value is used for all local rows)
79 .  d_nnz - array containing the number of nonzeros in the various rows of the
80            DIAGONAL portion of the local submatrix (possibly different for each row)
81            or NULL, if d_nz is used to specify the nonzero structure.
82            The size of this array is equal to the number of local rows, i.e 'm'.
83            For matrices that will be factored, you must leave room for (and set)
84            the diagonal entry even if it is zero.
85 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
86            submatrix (same value is used for all local rows).
87 -  o_nnz - array containing the number of nonzeros in the various rows of the
88            OFF-DIAGONAL portion of the local submatrix (possibly different for
89            each row) or NULL, if o_nz is used to specify the nonzero
90            structure. The size of this array is equal to the number
91            of local rows, i.e 'm'.
92 
93    If the *_nnz parameter is given then the *_nz parameter is ignored
94 
95    Level: intermediate
96 
97    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
98           from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
99           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
100 
101 .keywords: matrix
102 
103 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
104 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* Dispatch to the implementation composed as "MatISSetPreallocation_C"
     (MatISSetPreallocation_IS for MATIS); no-op for other matrix types */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
115 
116 #undef __FUNCT__
117 #define __FUNCT__ "MatISSetPreallocation_IS"
/* Preallocates the local matrix of a MATIS from MPIAIJ-style d_nz/o_nz counts.
   Counts are accumulated on the owned (root) space, broadcast to the local
   (leaf) space via the star forest, then used to preallocate the local matrix
   for the AIJ, BAIJ and SBAIJ formats (only the call matching the actual local
   matrix type has an effect). */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  ierr = MatISSetUpSF(B);CHKERRQ(ierr);

  /* total per-row count on the owned space: diagonal plus off-diagonal part
     (an overestimate for the unassembled local matrices) */
  if (!d_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nnz[i];

  if (!o_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nnz[i];

  /* broadcast the counts to the local (leaf) space; local matrix queries are
     interleaved to overlap communication with computation */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);

  /* AIJ: cap each row count at the number of local columns */
  for (i=0;i<matis->sf->nleaves;i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* BAIJ: compress in place to one block count per block row, using the first
     row of each block as representative (valid since i <= i*bs) */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* SBAIJ: subtract the block-row index to (approximately) drop the strictly
     lower-triangular part; NOTE(review): this can go negative for sparse late
     rows -- presumably tolerated by the preallocation routine, confirm */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
152 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
153 {
154   Mat_IS          *matis = (Mat_IS*)(A->data);
155   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
156   const PetscInt  *global_indices_r,*global_indices_c;
157   PetscInt        i,j,bs,rows,cols;
158   PetscInt        lrows,lcols;
159   PetscInt        local_rows,local_cols;
160   PetscMPIInt     nsubdomains;
161   PetscBool       isdense,issbaij;
162   PetscErrorCode  ierr;
163 
164   PetscFunctionBegin;
165   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
166   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
167   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
168   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
169   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
170   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
171   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
172   if (A->rmap->mapping != A->cmap->mapping) {
173     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
174   } else {
175     global_indices_c = global_indices_r;
176   }
177 
178   if (issbaij) {
179     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
180   }
181   /*
182      An SF reduce is needed to sum up properly on shared rows.
183      Note that generally preallocation is not exact, since it overestimates nonzeros
184   */
185   ierr = MatISSetUpSF(A);CHKERRQ(ierr);
186   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
187   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
188   /* All processes need to compute entire row ownership */
189   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
190   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
191   for (i=0;i<nsubdomains;i++) {
192     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
193       row_ownership[j] = i;
194     }
195   }
196 
197   /*
198      my_dnz and my_onz contains exact contribution to preallocation from each local mat
199      then, they will be summed up properly. This way, preallocation is always sufficient
200   */
201   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
202   /* preallocation as a MATAIJ */
203   if (isdense) { /* special case for dense local matrices */
204     for (i=0;i<local_rows;i++) {
205       PetscInt index_row = global_indices_r[i];
206       for (j=i;j<local_rows;j++) {
207         PetscInt owner = row_ownership[index_row];
208         PetscInt index_col = global_indices_c[j];
209         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
210           my_dnz[i] += 1;
211         } else { /* offdiag block */
212           my_onz[i] += 1;
213         }
214         /* same as before, interchanging rows and cols */
215         if (i != j) {
216           owner = row_ownership[index_col];
217           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
218             my_dnz[j] += 1;
219           } else {
220             my_onz[j] += 1;
221           }
222         }
223       }
224     }
225   } else { /* TODO: this could be optimized using MatGetRowIJ */
226     for (i=0;i<local_rows;i++) {
227       const PetscInt *cols;
228       PetscInt       ncols,index_row = global_indices_r[i];
229       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
230       for (j=0;j<ncols;j++) {
231         PetscInt owner = row_ownership[index_row];
232         PetscInt index_col = global_indices_c[cols[j]];
233         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
234           my_dnz[i] += 1;
235         } else { /* offdiag block */
236           my_onz[i] += 1;
237         }
238         /* same as before, interchanging rows and cols */
239         if (issbaij && index_col != index_row) {
240           owner = row_ownership[index_col];
241           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
242             my_dnz[cols[j]] += 1;
243           } else {
244             my_onz[cols[j]] += 1;
245           }
246         }
247       }
248       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
249     }
250   }
251   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
252   if (global_indices_c != global_indices_r) {
253     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
254   }
255   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
256 
257   /* Reduce my_dnz and my_onz */
258   if (maxreduce) {
259     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
260     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
261     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
262     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
263   } else {
264     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
265     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
266     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
267     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
268   }
269   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
270 
271   /* Resize preallocation if overestimated */
272   for (i=0;i<lrows;i++) {
273     dnz[i] = PetscMin(dnz[i],lcols);
274     onz[i] = PetscMin(onz[i],cols-lcols);
275   }
276   /* set preallocation */
277   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
278   for (i=0;i<lrows/bs;i++) {
279     dnz[i] = dnz[i*bs]/bs;
280     onz[i] = onz[i*bs]/bs;
281   }
282   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
283   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
284   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
285   if (issbaij) {
286     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
287   }
288   PetscFunctionReturn(0);
289 }
290 
291 #undef __FUNCT__
292 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assembles the unassembled MATIS matrix mat into the parallel matrix *M
   (AIJ/BAIJ/SBAIJ, chosen from the local matrix types), creating it when
   reuse == MAT_INITIAL_MATRIX and zeroing/refilling it otherwise. */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor shortcut: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every local matrix is SBAIJ */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match sizes and block size exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ local matrices are converted so full rows can be extracted below;
     otherwise the local matrix is used directly (reference taken for the
     unconditional MatDestroy at the end) */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays so the whole dense block is inserted at once */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense storage is column oriented, so switch insertion orientation */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* local_mat is released between assembly begin and end to overlap work */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default row-oriented insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
424 
425 #undef __FUNCT__
426 #define __FUNCT__ "MatISGetMPIXAIJ"
427 /*@
428     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
429 
430   Input Parameters:
431 +  mat - the matrix (should be of type MATIS)
432 -  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
433 
434   Output Parameter:
435 .  newmat - the matrix in AIJ format
436 
437   Level: developer
438 
439   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
440 
441 .seealso: MATIS
442 @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, the output must be a valid, distinct matrix on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the implementation composed as "MatISGetMPIXAIJ_C";
     PetscUseMethod errors out if mat does not provide it (i.e. is not MATIS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
459 
460 #undef __FUNCT__
461 #define __FUNCT__ "MatDuplicate_IS"
462 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
463 {
464   PetscErrorCode ierr;
465   Mat_IS         *matis = (Mat_IS*)(mat->data);
466   PetscInt       bs,m,n,M,N;
467   Mat            B,localmat;
468 
469   PetscFunctionBegin;
470   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
471   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
472   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
473   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
474   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
475   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
476   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
477   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
478   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
479   *newmat = B;
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "MatIsHermitian_IS"
485 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
486 {
487   PetscErrorCode ierr;
488   Mat_IS         *matis = (Mat_IS*)A->data;
489   PetscBool      local_sym;
490 
491   PetscFunctionBegin;
492   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
493   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "MatIsSymmetric_IS"
499 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
500 {
501   PetscErrorCode ierr;
502   Mat_IS         *matis = (Mat_IS*)A->data;
503   PetscBool      local_sym;
504 
505   PetscFunctionBegin;
506   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
507   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "MatDestroy_IS"
513 PetscErrorCode MatDestroy_IS(Mat A)
514 {
515   PetscErrorCode ierr;
516   Mat_IS         *b = (Mat_IS*)A->data;
517 
518   PetscFunctionBegin;
519   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
520   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
521   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
522   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
523   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
524   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
525   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
526   ierr = PetscFree(A->data);CHKERRQ(ierr);
527   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
528   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
529   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
530   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
531   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
532   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr);
533   PetscFunctionReturn(0);
534 }
535 
536 #undef __FUNCT__
537 #define __FUNCT__ "MatMult_IS"
538 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
539 {
540   PetscErrorCode ierr;
541   Mat_IS         *is  = (Mat_IS*)A->data;
542   PetscScalar    zero = 0.0;
543 
544   PetscFunctionBegin;
545   /*  scatter the global vector x into the local work vector */
546   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
547   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
548 
549   /* multiply the local matrix */
550   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
551 
552   /* scatter product back into global memory */
553   ierr = VecSet(y,zero);CHKERRQ(ierr);
554   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
555   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
556   PetscFunctionReturn(0);
557 }
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "MatMultAdd_IS"
561 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
562 {
563   Vec            temp_vec;
564   PetscErrorCode ierr;
565 
566   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
567   if (v3 != v2) {
568     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
569     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
570   } else {
571     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
572     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
573     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
574     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
575     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
576   }
577   PetscFunctionReturn(0);
578 }
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "MatMultTranspose_IS"
582 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
583 {
584   Mat_IS         *is = (Mat_IS*)A->data;
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   /*  scatter the global vector x into the local work vector */
589   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
590   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
591 
592   /* multiply the local matrix */
593   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
594 
595   /* scatter product back into global vector */
596   ierr = VecSet(x,0);CHKERRQ(ierr);
597   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
598   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
599   PetscFunctionReturn(0);
600 }
601 
602 #undef __FUNCT__
603 #define __FUNCT__ "MatMultTransposeAdd_IS"
604 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
605 {
606   Vec            temp_vec;
607   PetscErrorCode ierr;
608 
609   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
610   if (v3 != v2) {
611     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
612     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
613   } else {
614     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
615     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
616     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
617     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
618     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
619   }
620   PetscFunctionReturn(0);
621 }
622 
623 #undef __FUNCT__
624 #define __FUNCT__ "MatView_IS"
625 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
626 {
627   Mat_IS         *a = (Mat_IS*)A->data;
628   PetscErrorCode ierr;
629   PetscViewer    sviewer;
630 
631   PetscFunctionBegin;
632   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
633   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
634   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
635   PetscFunctionReturn(0);
636 }
637 
638 #undef __FUNCT__
639 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the row/column local-to-global mappings on a MATIS matrix and
   (re)builds all derived objects: the local matrix, the local work vectors,
   and the global-to-local scatter contexts. Any previously built state is
   destroyed first, so the routine can be called more than once. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: sequential, sized by the mapping sizes,
     AIJ by default but overridable via the "is_" options prefix */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: rctx maps the global row space onto
     the local row space (through rmapping); cctx does the same for columns
     and is shared with rctx when the two mappings coincide */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "MatSetValues_IS"
/* Inserts values given with GLOBAL indices: indices are translated to local
   numbering on the stack and forwarded to the local matrix. */
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* the fixed-size stack buffers above bound how many indices can be
     translated per call; NOTE(review): in optimized builds this check is
     compiled out and larger m/n would overflow the buffers -- confirm callers */
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  /* global-to-local translation (ISG2LMapApply: presumably a wrapper defined
     in matis.h around ISGlobalToLocalMappingApply -- confirm its behavior for
     indices outside the local subdomain) */
  ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "MatSetValuesLocal_IS"
724 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
725 {
726   PetscErrorCode ierr;
727   Mat_IS         *is = (Mat_IS*)A->data;
728 
729   PetscFunctionBegin;
730   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
736 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
737 {
738   PetscErrorCode ierr;
739   Mat_IS         *is = (Mat_IS*)A->data;
740 
741   PetscFunctionBegin;
742   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
743   PetscFunctionReturn(0);
744 }
745 
746 #undef __FUNCT__
747 #define __FUNCT__ "MatZeroRows_IS"
748 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
749 {
750   PetscInt       n_l = 0, *rows_l = NULL;
751   PetscErrorCode ierr;
752 
753   PetscFunctionBegin;
754   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
755   if (n) {
756     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
757     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
758   }
759   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
760   ierr = PetscFree(rows_l);CHKERRQ(ierr);
761   PetscFunctionReturn(0);
762 }
763 
764 #undef __FUNCT__
765 #define __FUNCT__ "MatZeroRowsLocal_IS"
766 PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
767 {
768   Mat_IS         *is = (Mat_IS*)A->data;
769   PetscErrorCode ierr;
770   PetscInt       i;
771   PetscScalar    *array;
772 
773   PetscFunctionBegin;
774   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
775   {
776     /*
777        Set up is->x as a "counting vector". This is in order to MatMult_IS
778        work properly in the interface nodes.
779     */
780     Vec counter;
781     ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
782     ierr = VecSet(counter,0.);CHKERRQ(ierr);
783     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
784     ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
785     ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
786     ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
787     ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
788     ierr = VecDestroy(&counter);CHKERRQ(ierr);
789   }
790   if (!n) {
791     is->pure_neumann = PETSC_TRUE;
792   } else {
793     is->pure_neumann = PETSC_FALSE;
794 
795     ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
796     ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
797     for (i=0; i<n; i++) {
798       ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
799     }
800     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
801     ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
802     ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
803   }
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "MatAssemblyBegin_IS"
809 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
810 {
811   Mat_IS         *is = (Mat_IS*)A->data;
812   PetscErrorCode ierr;
813 
814   PetscFunctionBegin;
815   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
816   PetscFunctionReturn(0);
817 }
818 
819 #undef __FUNCT__
820 #define __FUNCT__ "MatAssemblyEnd_IS"
821 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
822 {
823   Mat_IS         *is = (Mat_IS*)A->data;
824   PetscErrorCode ierr;
825 
826   PetscFunctionBegin;
827   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830 
831 #undef __FUNCT__
832 #define __FUNCT__ "MatISGetLocalMat_IS"
833 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
834 {
835   Mat_IS *is = (Mat_IS*)mat->data;
836 
837   PetscFunctionBegin;
838   *local = is->A;
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "MatISGetLocalMat"
844 /*@
845     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
846 
847   Input Parameter:
848 .  mat - the matrix
849 
850   Output Parameter:
851 .  local - the local matrix
852 
853   Level: advanced
854 
855   Notes:
856     This can be called if you have precomputed the nonzero structure of the
857   matrix and want to provide it to the inner matrix object to improve the performance
858   of the MatSetValues() operation.
859 
860 .seealso: MATIS
861 @*/
862 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
863 {
864   PetscErrorCode ierr;
865 
866   PetscFunctionBegin;
867   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
868   PetscValidPointer(local,2);
869   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
870   PetscFunctionReturn(0);
871 }
872 
873 #undef __FUNCT__
874 #define __FUNCT__ "MatISSetLocalMat_IS"
875 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
876 {
877   Mat_IS         *is = (Mat_IS*)mat->data;
878   PetscInt       nrows,ncols,orows,ocols;
879   PetscErrorCode ierr;
880 
881   PetscFunctionBegin;
882   if (is->A) {
883     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
884     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
885     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
886   }
887   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
888   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
889   is->A = local;
890   PetscFunctionReturn(0);
891 }
892 
893 #undef __FUNCT__
894 #define __FUNCT__ "MatISSetLocalMat"
895 /*@
896     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
897 
898   Input Parameter:
899 .  mat - the matrix
900 .  local - the local matrix
901 
902   Output Parameter:
903 
904   Level: advanced
905 
906   Notes:
907     This can be called if you have precomputed the local matrix and
908   want to provide it to the matrix object MATIS.
909 
910 .seealso: MATIS
911 @*/
912 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
913 {
914   PetscErrorCode ierr;
915 
916   PetscFunctionBegin;
917   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
918   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
919   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
920   PetscFunctionReturn(0);
921 }
922 
923 #undef __FUNCT__
924 #define __FUNCT__ "MatZeroEntries_IS"
925 PetscErrorCode MatZeroEntries_IS(Mat A)
926 {
927   Mat_IS         *a = (Mat_IS*)A->data;
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 #undef __FUNCT__
936 #define __FUNCT__ "MatScale_IS"
937 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
938 {
939   Mat_IS         *is = (Mat_IS*)A->data;
940   PetscErrorCode ierr;
941 
942   PetscFunctionBegin;
943   ierr = MatScale(is->A,a);CHKERRQ(ierr);
944   PetscFunctionReturn(0);
945 }
946 
947 #undef __FUNCT__
948 #define __FUNCT__ "MatGetDiagonal_IS"
949 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
950 {
951   Mat_IS         *is = (Mat_IS*)A->data;
952   PetscErrorCode ierr;
953 
954   PetscFunctionBegin;
955   /* get diagonal of the local matrix */
956   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
957 
958   /* scatter diagonal back into global vector */
959   ierr = VecSet(v,0);CHKERRQ(ierr);
960   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
961   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "MatSetOption_IS"
967 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
968 {
969   Mat_IS         *a = (Mat_IS*)A->data;
970   PetscErrorCode ierr;
971 
972   PetscFunctionBegin;
973   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
977 #undef __FUNCT__
978 #define __FUNCT__ "MatCreateIS"
979 /*@
980     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
981        process but not across processes.
982 
983    Input Parameters:
984 +     comm    - MPI communicator that will share the matrix
985 .     bs      - block size of the matrix
986 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
987 .     rmap    - local to global map for rows
988 -     cmap    - local to global map for cols
989 
990    Output Parameter:
991 .    A - the resulting matrix
992 
993    Level: advanced
994 
995    Notes: See MATIS for more details
996           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
997           by that process. The sizes of rmap and cmap define the size of the local matrices.
998           If either rmap or cmap are NULL, than the matrix is assumed to be square
999 
1000 .seealso: MATIS, MatSetLocalToGlobalMapping()
1001 @*/
1002 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1003 {
1004   PetscErrorCode ierr;
1005 
1006   PetscFunctionBegin;
1007   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1008   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1009   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1010   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1011   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1012   ierr = MatSetUp(*A);CHKERRQ(ierr);
1013   if (rmap && cmap) {
1014     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1015   } else if (!rmap) {
1016     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1017   } else {
1018     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1019   }
1020   PetscFunctionReturn(0);
1021 }
1022 
1023 /*MC
1024    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1025    This stores the matrices in globally unassembled form. Each processor
1026    assembles only its local Neumann problem and the parallel matrix vector
1027    product is handled "implicitly".
1028 
1029    Operations Provided:
1030 +  MatMult()
1031 .  MatMultAdd()
1032 .  MatMultTranspose()
1033 .  MatMultTransposeAdd()
1034 .  MatZeroEntries()
1035 .  MatSetOption()
1036 .  MatZeroRows()
1037 .  MatZeroRowsLocal()
1038 .  MatSetValues()
1039 .  MatSetValuesLocal()
1040 .  MatScale()
1041 .  MatGetDiagonal()
1042 -  MatSetLocalToGlobalMapping()
1043 
1044    Options Database Keys:
1045 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1046 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1048 
1049           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1050 
1051           You can do matrix preallocation on the local matrix after you obtain it with
1052           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1053 
1054   Level: advanced
1055 
1056 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1057 
1058 M*/
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "MatCreate_IS"
1062 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1063 {
1064   PetscErrorCode ierr;
1065   Mat_IS         *b;
1066 
1067   PetscFunctionBegin;
1068   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1069   A->data = (void*)b;
1070 
1071   /* matrix ops */
1072   ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1073   A->ops->mult                    = MatMult_IS;
1074   A->ops->multadd                 = MatMultAdd_IS;
1075   A->ops->multtranspose           = MatMultTranspose_IS;
1076   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1077   A->ops->destroy                 = MatDestroy_IS;
1078   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1079   A->ops->setvalues               = MatSetValues_IS;
1080   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1081   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1082   A->ops->zerorows                = MatZeroRows_IS;
1083   A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
1084   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1085   A->ops->assemblyend             = MatAssemblyEnd_IS;
1086   A->ops->view                    = MatView_IS;
1087   A->ops->zeroentries             = MatZeroEntries_IS;
1088   A->ops->scale                   = MatScale_IS;
1089   A->ops->getdiagonal             = MatGetDiagonal_IS;
1090   A->ops->setoption               = MatSetOption_IS;
1091   A->ops->ishermitian             = MatIsHermitian_IS;
1092   A->ops->issymmetric             = MatIsSymmetric_IS;
1093   A->ops->duplicate               = MatDuplicate_IS;
1094 
1095   /* special MATIS functions */
1096   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
1097   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
1098   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
1099   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
1100   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
1101   ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
1102   PetscFunctionReturn(0);
1103 }
1104