xref: /petsc/src/dm/impls/sliced/sliced.c (revision 836dcfcf4f741db140c73b60e7d4dffcbd7a3a28)
1 #include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>           /*I      "petscmat.h"      I*/
3 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"       I*/
4 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;   /* block size; number of nonzeros; CSR row offsets (length bs+1); CSR column indices (length nz) */
} DMSlicedBlockFills;

/* Backend data for a DMSLICED: ghost layout plus matrix preallocation hints */
typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;   /* block size; local block count; N presumably global block count (unused here -- confirm); ghost block count; global block indices of ghosts */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz;  /* per-block-row preallocation: diagonal/off-diagonal constant and per-row counts (arrays owned by the caller) */
  DMSlicedBlockFills *dfill,*ofill;            /* intra-block fill patterns used when preallocating scalar (AIJ) matrices */
} DM_Sliced;
15 
#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Sliced"
/*
   DMCreateMatrix_Sliced - Creates and preallocates a matrix of the requested type
   for the slice, and installs scalar and block local-to-global mappings built from
   the ghost list.

   Input:  dm    - the DMSLICED object
           mtype - matrix type name
   Output: J     - the created matrix (caller owns it)
*/
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* Each XXXSetPreallocation() is a no-op unless *J is of the matching type, so both can be called unconditionally */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Scalar problem: block-row counts are already per-scalar-row counts */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Only constant per-row counts were given; scale them by the block size */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* The (!!slice->o_nnz) factor makes the second allocation zero-sized (so_nnz left unset) when o_nnz was not supplied */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* Of the d_nnz[i/bs] diagonal blocks in this block row, one is the diagonal
           block itself (dfill pattern); the remaining d_nnz-1 use the off-diagonal
           fill pattern.  A missing fill pattern means a dense bs-wide block. */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;      /* owned rows are contiguous starting at rstart */
  }
  for (i=0; i<slice->Nghosts*bs; i++) {
    /* ghosts[] stores block indices; expand each to bs consecutive scalar indices */
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  /* lmap takes ownership of globals (PETSC_OWN_POINTER); blmap is the block-wise view of the same map */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap,blmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "DMSlicedSetGhosts"
83 /*@C
84     DMSlicedSetGhosts - Sets the global indices of other processes elements that will
85       be ghosts on this process
86 
87     Not Collective
88 
89     Input Parameters:
90 +    slice - the DM object
91 .    bs - block size
92 .    nlocal - number of local (owned, non-ghost) blocks
93 .    Nghosts - number of ghost blocks on this process
94 -    ghosts - global indices of each ghost block
95 
96     Level: advanced
97 
98 .seealso DMDestroy(), DMCreateGlobalVector()
99 
100 @*/
101 PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
102 {
103   PetscErrorCode ierr;
104   DM_Sliced      *slice = (DM_Sliced*)dm->data;
105 
106   PetscFunctionBegin;
107   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
108   ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
109   ierr = PetscMalloc(Nghosts*sizeof(PetscInt),&slice->ghosts);CHKERRQ(ierr);
110   ierr = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr);
111   slice->bs      = bs;
112   slice->n       = nlocal;
113   slice->Nghosts = Nghosts;
114   PetscFunctionReturn(0);
115 }
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "DMSlicedSetPreallocation"
119 /*@C
120     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced
121 
122     Not Collective
123 
124     Input Parameters:
125 +    slice - the DM object
126 .    d_nz  - number of block nonzeros per block row in diagonal portion of local
127            submatrix  (same for all local rows)
128 .    d_nnz - array containing the number of block nonzeros in the various block rows
129            of the in diagonal portion of the local (possibly different for each block
130            row) or PETSC_NULL.
131 .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
132            submatrix (same for all local rows).
133 -    o_nnz - array containing the number of nonzeros in the various block rows of the
134            off-diagonal portion of the local submatrix (possibly different for
135            each block row) or PETSC_NULL.
136 
137     Notes:
138     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
139     obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
140 
141     Level: advanced
142 
143 .seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
144          MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()
145 
146 @*/
147 PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
148 {
149   DM_Sliced *slice = (DM_Sliced*)dm->data;
150 
151   PetscFunctionBegin;
152   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
153   slice->d_nz  = d_nz;
154   slice->d_nnz = (PetscInt*)d_nnz;
155   slice->o_nz  = o_nz;
156   slice->o_nnz = (PetscInt*)o_nnz;
157   PetscFunctionReturn(0);
158 }
159 
160 #undef __FUNCT__
161 #define __FUNCT__ "DMSlicedSetBlockFills_Private"
162 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
163 {
164   PetscErrorCode     ierr;
165   PetscInt           i,j,nz,*fi,*fj;
166   DMSlicedBlockFills *f;
167 
168   PetscFunctionBegin;
169   PetscValidPointer(inf,3);
170   if (*inf) {ierr = PetscFree3((*inf)->i,(*inf)->j,*inf);CHKERRQ(ierr);}
171   if (!fill) PetscFunctionReturn(0);
172   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
173   ierr = PetscMalloc3(1,DMSlicedBlockFills,&f,bs+1,PetscInt,&fi,nz,PetscInt,&fj);CHKERRQ(ierr);
174   f->bs = bs;
175   f->nz = nz;
176   f->i  = fi;
177   f->j  = fj;
178   for (i=0,nz=0; i<bs; i++) {
179     fi[i] = nz;
180     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
181   }
182   fi[i] = nz;
183   *inf = f;
184   PetscFunctionReturn(0);
185 }
186 
187 #undef __FUNCT__
188 #define __FUNCT__ "DMSlicedSetBlockFills"
189 /*@C
190     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
191     of the matrix returned by DMSlicedGetMatrix().
192 
193     Logically Collective on DM
194 
195     Input Parameter:
196 +   sliced - the DM object
197 .   dfill - the fill pattern in the diagonal block (may be PETSC_NULL, means use dense block)
198 -   ofill - the fill pattern in the off-diagonal blocks
199 
200     Notes:
201     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
202     See DMDASetBlockFills() for example usage.
203 
204     Level: advanced
205 
206 .seealso DMSlicedGetMatrix(), DMDASetBlockFills()
207 @*/
208 PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
209 {
210   DM_Sliced      *slice = (DM_Sliced*)dm->data;
211   PetscErrorCode ierr;
212 
213   PetscFunctionBegin;
214   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
215   ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
216   ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
/* Tears down the DMSLICED backend data: ghost list, fill patterns, and the struct itself. */
static PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  /* dfill/ofill were obtained with a single PetscMalloc3(struct,i,j) in
     DMSlicedSetBlockFills_Private(); the arguments here follow that same order.
     NOTE(review): verify against the PetscFree3() macro that the first argument
     must be the base of the coalesced allocation. */
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(slice);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
235 
236 #undef __FUNCT__
237 #define __FUNCT__ "DMCreateGlobalVector_Sliced"
238 static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
239 {
240   PetscErrorCode     ierr;
241   DM_Sliced          *slice = (DM_Sliced*)dm->data;
242 
243   PetscFunctionBegin;
244   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
245   PetscValidPointer(gvec,2);
246   *gvec = 0;
247   ierr = VecCreateGhostBlock(((PetscObject)dm)->comm,slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);CHKERRQ(ierr);
248   ierr = PetscObjectCompose((PetscObject)*gvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
249   PetscFunctionReturn(0);
250 }
251 
252 #undef __FUNCT__
253 #define __FUNCT__ "DMGlobalToLocalBegin_Sliced"
254 static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
255 {
256   PetscErrorCode ierr;
257   PetscBool      flg;
258 
259   PetscFunctionBegin;
260   if (!flg) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
261   ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
262   ierr = VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
263   PetscFunctionReturn(0);
264 }
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "DMGlobalToLocalEnd_Sliced"
268 static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
269 {
270   PetscErrorCode ierr;
271   PetscBool      flg;
272 
273   PetscFunctionBegin;
274   ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
275   if (!flg) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
276   ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
277   PetscFunctionReturn(0);
278 }
279 
/*MC
   DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhostBlock() ghosted vectors for storing the fields

   See DMSlicedCreate() for details.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
M*/
289 
290 EXTERN_C_BEGIN
291 #undef __FUNCT__
292 #define __FUNCT__ "DMCreate_Sliced"
293 PetscErrorCode  DMCreate_Sliced(DM p)
294 {
295   PetscErrorCode ierr;
296   DM_Sliced      *slice;
297 
298   PetscFunctionBegin;
299   ierr = PetscNewLog(p,DM_Sliced,&slice);CHKERRQ(ierr);
300   p->data = slice;
301 
302   ierr = PetscObjectChangeTypeName((PetscObject)p,DMSLICED);CHKERRQ(ierr);
303   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
304   p->ops->creatematrix       = DMCreateMatrix_Sliced;
305   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
306   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
307   p->ops->destroy            = DMDestroy_Sliced;
308   PetscFunctionReturn(0);
309 }
310 EXTERN_C_END
311 
312 #undef __FUNCT__
313 #define __FUNCT__ "DMSlicedCreate"
314 /*@C
315     DMSlicedCreate - Creates a DM object, used to manage data for a unstructured problem
316 
317     Collective on MPI_Comm
318 
319     Input Parameter:
320 +   comm - the processors that will share the global vector
321 .   bs - the block size
322 .   nlocal - number of vector entries on this process
323 .   Nghosts - number of ghost points needed on this process
324 .   ghosts - global indices of all ghost points for this process
325 .   d_nnz - matrix preallocation information representing coupling within this process
326 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
327 
328     Output Parameters:
329 .   slice - the slice object
330 
331     Notes:
332         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
333         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
334         the ghost points.
335 
336         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
337 
338     Level: advanced
339 
340 .seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
341          VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
342 
343 @*/
344 PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
345 {
346   PetscErrorCode ierr;
347 
348   PetscFunctionBegin;
349   PetscValidPointer(dm,2);
350   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
351   ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
352   ierr = DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);CHKERRQ(ierr);
353   if (d_nnz) {
354     ierr = DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);CHKERRQ(ierr);
355   }
356   PetscFunctionReturn(0);
357 }
358 
359