xref: /petsc/src/mat/impls/is/matis.c (revision 65066ba5555fc053c3b9a5584fceec17514b3fd8)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatISComputeSF_Private"
21 static PetscErrorCode MatISComputeSF_Private(Mat B)
22 {
23   Mat_IS         *matis = (Mat_IS*)(B->data);
24   const PetscInt *gidxs;
25   PetscErrorCode ierr;
26 
27   PetscFunctionBegin;
28   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
29   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
30   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
31   ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
32   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
33   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
34   ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
35   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e. 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e. 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation; silently a no-op if the
     method is not composed on this matrix type */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* Type-specific implementation of MatISSetPreallocation().
   Sums the user-provided diagonal and off-diagonal row counts on the owned
   (root) rows, broadcasts the totals to the local unassembled (leaf) rows
   through the SF, then uses the result to preallocate the local matrix in
   AIJ, BAIJ and SBAIJ formats (only the call matching the actual local
   matrix type takes effect). */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = requested diagonal + off-diagonal nonzeros for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with the local size/block-size queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row cannot hold more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress scalar row counts into block-row counts for BAIJ; uses the
     first row of each block as representative (an estimate, like the rest
     of this routine's preallocation) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): subtracting the block-row index looks like a heuristic
     correction for upper-triangular (SBAIJ) storage -- confirm intent */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
128 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Computes and sets the preallocation of the parallel matrix B from the
   sparsity pattern of the unassembled MATIS matrix A.
   Each process counts, for every row of its local matrix, how many entries
   fall in the diagonal and off-diagonal blocks of the owning process; the
   per-subdomain counts are then combined on shared rows through A's SF,
   either summed (exact-or-overestimate) or maxed when maxreduce is true. */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  /* local-to-global index arrays for rows and columns; shared when the
     row and column mappings are the same object */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle; make MatGetRow return it */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges, used to
     decide diagonal vs off-diagonal block membership */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt owner = row_ownership[global_indices_r[i]];
      for (j=0;j<local_cols;j++) {
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols: SBAIJ only stores
           the upper triangle, so the transposed entry must be counted too */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz (leaves) onto the owned rows (roots) */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation: AIJ first, then compress scalar counts into
     block-row counts (first row of each block is the representative)
     for the BAIJ/SBAIJ variants */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
262 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Type-specific implementation of MatISGetMPIXAIJ(): assembles the
   unassembled MATIS matrix 'mat' into the parallel matrix *M.
   On one process the local matrix is simply duplicated/copied. Otherwise
   a new SBAIJ/BAIJ/AIJ matrix is created (or, with MAT_REUSE_MATRIX, the
   user-provided one is checked and zeroed), preallocated, and filled by
   ADD_VALUES insertion of every local matrix's entries. */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every process has an
       SBAIJ local matrix, otherwise (B)AIJ depending on the block size */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match the MATIS layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* insertion below needs full rows; convert the symmetric storage */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy;

    /* identity index array: insert the whole dense array in one call,
       column oriented to match the dense storage */
    ierr = PetscMalloc1(PetscMax(local_rows,local_cols),&dummy);CHKERRQ(ierr);
    for (i=0;i<PetscMax(local_rows,local_cols);i++) dummy[i] = i;
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy,local_cols,dummy,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    ierr = PetscFree(dummy);CHKERRQ(ierr);
  } else if (isseqaij) {
    /* fast path: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    /* NOTE(review): both error messages below say MatRestoreRowIJ; the
       first failure actually comes from MatGetRowIJ */
    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* local_mat can be released between AssemblyBegin and AssemblyEnd:
     all values have already been stashed */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
385 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* when reusing, the target matrix must be valid, live on the same
     communicator, and be a distinct object */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the type-specific implementation (errors if not composed) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
420 
421 #undef __FUNCT__
422 #define __FUNCT__ "MatDuplicate_IS"
423 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
424 {
425   PetscErrorCode ierr;
426   Mat_IS         *matis = (Mat_IS*)(mat->data);
427   PetscInt       bs,m,n,M,N;
428   Mat            B,localmat;
429 
430   PetscFunctionBegin;
431   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
432   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
433   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
434   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
435   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
436   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
437   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
438   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
439   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
440   *newmat = B;
441   PetscFunctionReturn(0);
442 }
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatIsHermitian_IS"
446 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
447 {
448   PetscErrorCode ierr;
449   Mat_IS         *matis = (Mat_IS*)A->data;
450   PetscBool      local_sym;
451 
452   PetscFunctionBegin;
453   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
454   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
455   PetscFunctionReturn(0);
456 }
457 
458 #undef __FUNCT__
459 #define __FUNCT__ "MatIsSymmetric_IS"
460 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
461 {
462   PetscErrorCode ierr;
463   Mat_IS         *matis = (Mat_IS*)A->data;
464   PetscBool      local_sym;
465 
466   PetscFunctionBegin;
467   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
468   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
469   PetscFunctionReturn(0);
470 }
471 
472 #undef __FUNCT__
473 #define __FUNCT__ "MatDestroy_IS"
474 PetscErrorCode MatDestroy_IS(Mat A)
475 {
476   PetscErrorCode ierr;
477   Mat_IS         *b = (Mat_IS*)A->data;
478 
479   PetscFunctionBegin;
480   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
481   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
482   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
483   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
484   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
485   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
486   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
487   ierr = PetscFree(A->data);CHKERRQ(ierr);
488   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
489   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
490   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
491   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
492   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
493   PetscFunctionReturn(0);
494 }
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatMult_IS"
498 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
499 {
500   PetscErrorCode ierr;
501   Mat_IS         *is  = (Mat_IS*)A->data;
502   PetscScalar    zero = 0.0;
503 
504   PetscFunctionBegin;
505   /*  scatter the global vector x into the local work vector */
506   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
507   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
508 
509   /* multiply the local matrix */
510   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
511 
512   /* scatter product back into global memory */
513   ierr = VecSet(y,zero);CHKERRQ(ierr);
514   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
515   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
516   PetscFunctionReturn(0);
517 }
518 
519 #undef __FUNCT__
520 #define __FUNCT__ "MatMultAdd_IS"
521 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
522 {
523   Vec            temp_vec;
524   PetscErrorCode ierr;
525 
526   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
527   if (v3 != v2) {
528     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
529     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
530   } else {
531     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
532     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
533     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
534     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
535     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
536   }
537   PetscFunctionReturn(0);
538 }
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "MatMultTranspose_IS"
542 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
543 {
544   Mat_IS         *is = (Mat_IS*)A->data;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin;
548   /*  scatter the global vector x into the local work vector */
549   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
550   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
551 
552   /* multiply the local matrix */
553   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
554 
555   /* scatter product back into global vector */
556   ierr = VecSet(x,0);CHKERRQ(ierr);
557   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
558   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
559   PetscFunctionReturn(0);
560 }
561 
562 #undef __FUNCT__
563 #define __FUNCT__ "MatMultTransposeAdd_IS"
564 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
565 {
566   Vec            temp_vec;
567   PetscErrorCode ierr;
568 
569   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
570   if (v3 != v2) {
571     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
572     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
573   } else {
574     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
575     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
576     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
577     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
578     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
579   }
580   PetscFunctionReturn(0);
581 }
582 
583 #undef __FUNCT__
584 #define __FUNCT__ "MatView_IS"
585 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
586 {
587   Mat_IS         *a = (Mat_IS*)A->data;
588   PetscErrorCode ierr;
589   PetscViewer    sviewer;
590 
591   PetscFunctionBegin;
592   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
593   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
594   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
595   PetscFunctionReturn(0);
596 }
597 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the row/column local-to-global mappings on a MATIS matrix.
   This (re)creates the whole MATIS machinery: the sequential local matrix
   (sized by the mappings), the local work vectors, and the global-to-local
   scatters used by the MatMult kernels. Any previously built objects are
   destroyed first, so the routine can be called repeatedly. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential matrix whose dimensions are
     the sizes of the mappings (the unassembled subdomain problem) */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* let the user customize the local matrix via the -<prefix>is_ options */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local entry i comes from global
     entry mapping(i); 'to' is the identity stride on the local vector */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same mapping: the column scatter is the row scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
663 
664 #undef __FUNCT__
665 #define __FUNCT__ "MatSetValues_IS"
666 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
667 {
668   Mat_IS         *is = (Mat_IS*)mat->data;
669   PetscInt       rows_l[2048],cols_l[2048];
670   PetscErrorCode ierr;
671 
672   PetscFunctionBegin;
673 #if defined(PETSC_USE_DEBUG)
674   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
675 #endif
676   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
677   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
678   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
679   PetscFunctionReturn(0);
680 }
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "MatSetValuesLocal_IS"
684 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
685 {
686   PetscErrorCode ierr;
687   Mat_IS         *is = (Mat_IS*)A->data;
688 
689   PetscFunctionBegin;
690   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
691   PetscFunctionReturn(0);
692 }
693 
694 #undef __FUNCT__
695 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
696 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
697 {
698   PetscErrorCode ierr;
699   Mat_IS         *is = (Mat_IS*)A->data;
700 
701   PetscFunctionBegin;
702   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
703   PetscFunctionReturn(0);
704 }
705 
706 #undef __FUNCT__
707 #define __FUNCT__ "MatZeroRows_IS"
708 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
709 {
710   PetscInt       n_l = 0, *rows_l = NULL;
711   PetscErrorCode ierr;
712 
713   PetscFunctionBegin;
714   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
715   if (n) {
716     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
717     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
718   }
719   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
720   ierr = PetscFree(rows_l);CHKERRQ(ierr);
721   PetscFunctionReturn(0);
722 }
723 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   Zeroes rows of the inner matrix given by LOCAL indices.

   When diag is nonzero, the local diagonal entry is set to diag divided by
   the multiplicity of the dof (the number of subdomains sharing it), so that
   the implicitly assembled global operator ends up with exactly diag on the
   zeroed rows.  Fixing up a solution/rhs pair (x,b) is not supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector" (the original comment said is->x,
       but the code uses is->y): the reverse ADD_VALUES scatter accumulates
       a 1 from every subdomain sharing each global dof, and the forward
       INSERT_VALUES scatter brings that multiplicity back into is->y.
       This is in order to MatMult_IS work properly in the interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* array[rows[i]] is the multiplicity of local dof rows[i]; dividing diag
       by it makes the global (summed) diagonal equal to diag */
    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
766 
767 #undef __FUNCT__
768 #define __FUNCT__ "MatAssemblyBegin_IS"
769 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
770 {
771   Mat_IS         *is = (Mat_IS*)A->data;
772   PetscErrorCode ierr;
773 
774   PetscFunctionBegin;
775   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
776   PetscFunctionReturn(0);
777 }
778 
779 #undef __FUNCT__
780 #define __FUNCT__ "MatAssemblyEnd_IS"
781 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
782 {
783   Mat_IS         *is = (Mat_IS*)A->data;
784   PetscErrorCode ierr;
785 
786   PetscFunctionBegin;
787   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
788   PetscFunctionReturn(0);
789 }
790 
791 #undef __FUNCT__
792 #define __FUNCT__ "MatISGetLocalMat_IS"
793 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
794 {
795   Mat_IS *is = (Mat_IS*)mat->data;
796 
797   PetscFunctionBegin;
798   *local = is->A;
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "MatISGetLocalMat"
804 /*@
805     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
806 
807   Input Parameter:
808 .  mat - the matrix
809 
810   Output Parameter:
811 .  local - the local matrix
812 
813   Level: advanced
814 
815   Notes:
816     This can be called if you have precomputed the nonzero structure of the
817   matrix and want to provide it to the inner matrix object to improve the performance
818   of the MatSetValues() operation.
819 
820   This function does not increase the reference count for the local Mat.  Do not destroy it and do not attempt to use
821   your reference after destroying the parent mat.
822 
823 .seealso: MATIS
824 @*/
825 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
826 {
827   PetscErrorCode ierr;
828 
829   PetscFunctionBegin;
830   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
831   PetscValidPointer(local,2);
832   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
833   PetscFunctionReturn(0);
834 }
835 
836 #undef __FUNCT__
837 #define __FUNCT__ "MatISSetLocalMat_IS"
838 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
839 {
840   Mat_IS         *is = (Mat_IS*)mat->data;
841   PetscInt       nrows,ncols,orows,ocols;
842   PetscErrorCode ierr;
843 
844   PetscFunctionBegin;
845   if (is->A) {
846     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
847     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
848     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
849   }
850   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
851   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
852   is->A = local;
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "MatISSetLocalMat"
858 /*@
859     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
860 
861   Input Parameter:
862 .  mat - the matrix
863 .  local - the local matrix
864 
865   Output Parameter:
866 
867   Level: advanced
868 
869   Notes:
870     This can be called if you have precomputed the local matrix and
871   want to provide it to the matrix object MATIS.
872 
873 .seealso: MATIS
874 @*/
875 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
876 {
877   PetscErrorCode ierr;
878 
879   PetscFunctionBegin;
880   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
881   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
882   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
883   PetscFunctionReturn(0);
884 }
885 
886 #undef __FUNCT__
887 #define __FUNCT__ "MatZeroEntries_IS"
888 PetscErrorCode MatZeroEntries_IS(Mat A)
889 {
890   Mat_IS         *a = (Mat_IS*)A->data;
891   PetscErrorCode ierr;
892 
893   PetscFunctionBegin;
894   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
895   PetscFunctionReturn(0);
896 }
897 
898 #undef __FUNCT__
899 #define __FUNCT__ "MatScale_IS"
900 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
901 {
902   Mat_IS         *is = (Mat_IS*)A->data;
903   PetscErrorCode ierr;
904 
905   PetscFunctionBegin;
906   ierr = MatScale(is->A,a);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 
910 #undef __FUNCT__
911 #define __FUNCT__ "MatGetDiagonal_IS"
912 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
913 {
914   Mat_IS         *is = (Mat_IS*)A->data;
915   PetscErrorCode ierr;
916 
917   PetscFunctionBegin;
918   /* get diagonal of the local matrix */
919   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
920 
921   /* scatter diagonal back into global vector */
922   ierr = VecSet(v,0);CHKERRQ(ierr);
923   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
924   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "MatSetOption_IS"
930 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
931 {
932   Mat_IS         *a = (Mat_IS*)A->data;
933   PetscErrorCode ierr;
934 
935   PetscFunctionBegin;
936   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 #undef __FUNCT__
941 #define __FUNCT__ "MatCreateIS"
942 /*@
943     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
944        process but not across processes.
945 
946    Input Parameters:
947 +     comm    - MPI communicator that will share the matrix
948 .     bs      - block size of the matrix
949 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
950 .     rmap    - local to global map for rows
951 -     cmap    - local to global map for cols
952 
953    Output Parameter:
954 .    A - the resulting matrix
955 
956    Level: advanced
957 
958    Notes: See MATIS for more details
959           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
960           by that process. The sizes of rmap and cmap define the size of the local matrices.
961           If either rmap or cmap are NULL, than the matrix is assumed to be square
962 
963 .seealso: MATIS, MatSetLocalToGlobalMapping()
964 @*/
965 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
966 {
967   PetscErrorCode ierr;
968 
969   PetscFunctionBegin;
970   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
971   ierr = MatCreate(comm,A);CHKERRQ(ierr);
972   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
973   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
974   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
975   ierr = MatSetUp(*A);CHKERRQ(ierr);
976   if (rmap && cmap) {
977     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
978   } else if (!rmap) {
979     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
980   } else {
981     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
982   }
983   PetscFunctionReturn(0);
984 }
985 
986 /*MC
987    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
988    This stores the matrices in globally unassembled form. Each processor
989    assembles only its local Neumann problem and the parallel matrix vector
990    product is handled "implicitly".
991 
992    Operations Provided:
993 +  MatMult()
994 .  MatMultAdd()
995 .  MatMultTranspose()
996 .  MatMultTransposeAdd()
997 .  MatZeroEntries()
998 .  MatSetOption()
999 .  MatZeroRows()
1000 .  MatZeroRowsLocal()
1001 .  MatSetValues()
1002 .  MatSetValuesLocal()
1003 .  MatScale()
1004 .  MatGetDiagonal()
1005 -  MatSetLocalToGlobalMapping()
1006 
1007    Options Database Keys:
1008 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1009 
          Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1011 
1012           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1013 
1014           You can do matrix preallocation on the local matrix after you obtain it with
1015           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1016 
1017   Level: advanced
1018 
1019 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1020 
1021 M*/
1022 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for MATIS: allocates the type-specific data structure,
   installs the operation table, and composes the MATIS-specific
   query/setup functions on the object. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: zero the whole table first so that any operation not
     explicitly set below stays NULL (i.e. unsupported for this type) */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions, reachable via PetscUseMethod/PetscTryMethod
     (e.g. MatISGetLocalMat(), MatISSetLocalMat()) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1066