#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

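/* The matrix of a redundant DM is dense: all N degrees of freedom are globally coupled,
   so every locally owned row is preallocated for, and assembled with, all N columns. */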
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm),J));
  PetscCall(MatSetSizes(*J,red->n,red->n,red->N,red->N));
  PetscCall(MatSetType(*J,dm->mattype));
  PetscCall(MatSeqAIJSetPreallocation(*J,red->n,NULL));
  PetscCall(MatSeqBAIJSetPreallocation(*J,1,red->n,NULL));
  PetscCall(MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL));
  PetscCall(MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL));

  PetscCall(DMGetLocalToGlobalMapping(dm,&ltog));
  PetscCall(MatSetLocalToGlobalMapping(*J,ltog,ltog));
  PetscCall(MatSetDM(*J,dm));

  PetscCall(PetscMalloc2(red->N,&cols,red->N,&vals));
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  PetscCall(MatGetOwnershipRange(*J,&rstart,&rend));
  for (i=rstart; i<rend; i++) {
    PetscCall(MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES));
  }
  PetscCall(PetscFree2(cols,vals));
  PetscCall(MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(dm->data));
  PetscFunctionReturn(0);
}

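/* The global vector stores all N entries on the owning rank and none elsewhere (local length red->n). */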
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = NULL;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)dm),gvec));
  PetscCall(VecSetSizes(*gvec,red->n,red->N));
  PetscCall(VecSetType(*gvec,dm->vectype));
  PetscCall(DMGetLocalToGlobalMapping(dm,&ltog));
  PetscCall(VecSetLocalToGlobalMapping(*gvec,ltog));
  PetscCall(VecSetDM(*gvec,dm));
  PetscFunctionReturn(0);
}

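/* The local vector is a sequential vector holding all N entries on every rank. */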
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = NULL;
  PetscCall(VecCreate(PETSC_COMM_SELF,lvec));
  PetscCall(VecSetSizes(*lvec,red->N,red->N));
  PetscCall(VecSetType(*lvec,dm->vectype));
  PetscCall(VecSetDM(*lvec,dm));
  PetscFunctionReturn(0);
}

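/* Local-to-global: with ADD_VALUES or MAX_VALUES every rank's local copy is reduced onto the owner's
   global vector; with INSERT_VALUES the owner simply copies its own local values into the global vector. */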
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  PetscCall(VecGetArrayRead(l,&lv));
  PetscCall(VecGetArray(g,&gv));
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    PetscCallMPI(MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm)));
  } break;
  case INSERT_VALUES:
    PetscCall(PetscArraycpy(gv,lv,red->n));
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(l,&lv));
  PetscCall(VecRestoreArray(g,&gv));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  /* all communication is completed in DMLocalToGlobalBegin_Redundant() */
  PetscFunctionReturn(0);
}

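/* Global-to-local: the owner copies its global values into its local vector, then broadcasts them so
   every rank's local vector holds all N values. */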
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(g,&gv));
  PetscCall(VecGetArray(l,&lv));
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) PetscCall(PetscArraycpy(lv,gv,red->n));
    PetscCallMPI(MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm)));
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(g,&gv));
  PetscCall(VecRestoreArray(l,&lv));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  /* all communication is completed in DMGlobalToLocalBegin_Redundant() */
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscFunctionBegin;
  /* nothing to do; the sizes and local-to-global mapping are set in DMRedundantSetSize() */
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N));
  }
  PetscFunctionReturn(0);
}

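/* Since all N degrees of freedom are coupled to each other, every dof is assigned its own color. */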
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  PetscCall(PetscMalloc1(nloc,&colors));
  for (i=0; i<nloc; i++) colors[i] = i;
  PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring));
  PetscCall(ISColoringSetType(*coloring,ctype));
  PetscFunctionReturn(0);
}

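/* Refining or coarsening a redundant DM produces another redundant DM with the same owner and size;
   only congruent (or identical) communicators are supported. */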
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    PetscCall(PetscObjectGetComm((PetscObject)dmc,&comm));
  }
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag));
  PetscCheckFalse(flag != MPI_CONGRUENT && flag != MPI_IDENT,PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  PetscCall(DMRedundantCreate(comm,redc->rank,redc->N,dmf));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    PetscCall(PetscObjectGetComm((PetscObject)dmf,&comm));
  }
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag));
  PetscCheckFalse(flag != MPI_CONGRUENT && flag != MPI_IDENT,PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  PetscCall(DMRedundantCreate(comm,redf->rank,redf->N,dmc));
  PetscFunctionReturn(0);
}

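/* Coarse and fine redundant DMs hold the same degrees of freedom, so the interpolation is the identity matrix. */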
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag));
  PetscCheckFalse(flag != MPI_CONGRUENT && flag != MPI_IDENT,PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  PetscCheckFalse(redc->rank != redf->rank,PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  PetscCheckFalse(redc->N != redf->N,PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc),P));
  PetscCall(MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N));
  PetscCall(MatSetType(*P,MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*P,1,NULL));
  PetscCall(MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL));
  PetscCall(MatGetOwnershipRange(*P,&rstart,&rend));
  for (i=rstart; i<rend; i++) PetscCall(MatSetValue(*P,i,i,1.0,INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY));
  if (scale) PetscCall(DMCreateInterpolationScale(dmc,dmf,*P,scale));
  PetscFunctionReturn(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on dm

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

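    Example Usage:
    A minimal sketch of setting the size by hand instead of calling DMRedundantCreate(); the choice of 5 dofs owned by rank 0 is purely illustrative, and PetscCall() assumes the enclosing function returns a PetscErrorCode:
.vb
    DM dm;

    PetscCall(DMCreate(PETSC_COMM_WORLD,&dm));
    PetscCall(DMSetType(dm,DMREDUNDANT));
    PetscCall(DMRedundantSetSize(dm,0,5));
    PetscCall(DMSetUp(dm));
.ve
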
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveMPIInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscMPIInt    myrank;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank));
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* set up the local-to-global mapping: on every rank the local indices 0..N-1 map to the global indices 0..N-1 */
  PetscCall(PetscMalloc1(red->N,&globals));
  for (i=0; i<red->N; i++) globals[i] = i;
  PetscCall(ISLocalToGlobalMappingDestroy(&dm->ltogmap));
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

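   Example Usage:
   A minimal sketch of that pattern, pairing a redundant DM with a DMDA inside a DMCOMPOSITE; the sizes and variable names are purely illustrative, and PetscCall() assumes the enclosing function returns a PetscErrorCode:
.vb
   DM da,red,pack;

   PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,64,1,1,NULL,&da));
   PetscCall(DMSetUp(da));
   PetscCall(DMRedundantCreate(PETSC_COMM_WORLD,0,2,&red));   /* two parameters, both owned by rank 0 */
   PetscCall(DMCompositeCreate(PETSC_COMM_WORLD,&pack));
   PetscCall(DMCompositeAddDM(pack,da));
   PetscCall(DMCompositeAddDM(pack,red));
.ve
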
  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant   *red;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(dm,&red));
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant));
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

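    Example Usage:
    A minimal sketch; the sizes and variable names are purely illustrative, and PetscCall() assumes the enclosing function returns a PetscErrorCode:
.vb
    DM  red;
    Vec g,l;

    PetscCall(DMRedundantCreate(PETSC_COMM_WORLD,0,3,&red));   /* 3 dofs, all owned by rank 0 */
    PetscCall(DMCreateGlobalVector(red,&g));                   /* length 3 on rank 0, length 0 elsewhere */
    PetscCall(DMCreateLocalVector(red,&l));                    /* length 3 on every rank */
    PetscCall(DMGlobalToLocalBegin(red,g,INSERT_VALUES,l));    /* broadcast the owner's values */
    PetscCall(DMGlobalToLocalEnd(red,g,INSERT_VALUES,l));
    PetscCall(VecDestroy(&l));
    PetscCall(VecDestroy(&g));
    PetscCall(DMDestroy(&red));
.ve
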
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  PetscCall(DMCreate(comm,dm));
  PetscCall(DMSetType(*dm,DMREDUNDANT));
  PetscCall(DMRedundantSetSize(*dm,rank,N));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(0);
}
413