#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  /* The variables are densely coupled, so every owned row is preallocated as full */
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetDM(*J,dm);CHKERRQ(ierr);

  /* Insert explicit zeros so the dense nonzero pattern is in place after assembly */
  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
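
/*
   A minimal usage sketch (illustrative, not from the source): DMCreateMatrix()
   on a DMREDUNDANT returns a matrix with one fully dense row block owned by
   red->rank.

     DM  dm;
     Mat J;
     DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);
     DMSetMatType(dm,MATAIJ);
     DMCreateMatrix(dm,&J);
     MatDestroy(&J);
     DMDestroy(&dm);
*/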

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = NULL;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = NULL;
  /* The local vector is sequential and holds all N redundant values on every rank */
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRMPI(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      /* MAX_VALUES is not meaningful for complex scalars */
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRMPI(ierr);
  } break;
  case INSERT_VALUES:
    ierr = PetscArraycpy(gv,lv,red->n);CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
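
/*
   Data-movement sketch (illustrative, not from the source): with ADD_VALUES
   every rank's full local copy is summed onto the owning rank via MPI_Reduce(),
   so after

     DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);
     DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);

   the global vector on red->rank holds the sum of all ranks' local copies,
   while the other ranks store no global entries. INSERT_VALUES only copies the
   owner's local values into its global storage and needs no communication.
*/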

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  /* All communication is performed in DMLocalToGlobalBegin_Redundant() */
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    /* The owner copies its values into the local array, then broadcasts them to all other ranks */
    if (red->n) {ierr = PetscArraycpy(lv,gv,red->n);CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRMPI(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
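
/*
   Usage sketch (illustrative): the global-to-local direction is a broadcast,
   leaving every rank with a complete local copy of the N values:

     DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);
     DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
*/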

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* red->rank is a PetscMPIInt, so it is printed with %d rather than the PetscInt format %D */
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  /* Dense coupling: every degree of freedom needs its own color */
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
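
/*
   Sketch (illustrative): the coloring can drive finite-difference Jacobian
   assembly; with dense coupling each column gets a distinct color:

     ISColoring    coloring;
     MatFDColoring fdcoloring;
     DMCreateColoring(dm,IS_COLORING_GLOBAL,&coloring);
     MatFDColoringCreate(J,coloring,&fdcoloring);
     ISColoringDestroy(&coloring);
*/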

/* A redundant DM has no mesh hierarchy, so refinement and coarsening both produce a DM of the same size */
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRMPI(ierr);
  PetscCheckFalse(flag != MPI_CONGRUENT && flag != MPI_IDENT,PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRMPI(ierr);
  PetscCheckFalse(flag != MPI_CONGRUENT && flag != MPI_IDENT,PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRMPI(ierr);
  PetscCheckFalse(flag != MPI_CONGRUENT && flag != MPI_IDENT,PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  PetscCheckFalse(redc->rank != redf->rank,PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  PetscCheckFalse(redc->N != redf->N,PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  /* Coarse and fine DMs carry the same dofs, so the interpolation is the identity */
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
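
/*
   Usage sketch (illustrative): since the interpolation between two redundant
   DMs is the identity, callers may simply do

     Mat P;
     DMCreateInterpolation(dmc,dmf,&P,NULL);
     MatDestroy(&P);
*/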

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on dm

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveMPIInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of the process owning the redundant degrees of freedom (pass NULL if not needed)
-   N - total number of redundant degrees of freedom (pass NULL if not needed)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
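
/*
   Usage sketch (illustrative): resize an existing redundant DM, then query
   it; afterwards owner == 0 and N == 5 on every rank:

     PetscMPIInt owner;
     PetscInt    N;
     DMRedundantSetSize(dm,0,5);
     DMRedundantGetSize(dm,&owner,&N);
*/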

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRMPI(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* Set up the local-to-global mapping: on every rank, local index i maps to global index i */
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingDestroy(&dm->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The rank argument is PetscMPIInt* to match the prototype used with PetscUseMethod() in DMRedundantGetSize() */
static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables). In the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
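
/*
   Usage sketch (illustrative, assuming an existing mesh DM named dmmesh): a
   redundant DM holding a single continuation parameter, packed together with
   the mesh unknowns inside a DMCOMPOSITE:

     DM dmred,pack;
     DMRedundantCreate(PETSC_COMM_WORLD,0,1,&dmred);
     DMCompositeCreate(PETSC_COMM_WORLD,&pack);
     DMCompositeAddDM(pack,dmmesh);
     DMCompositeAddDM(pack,dmred);
*/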

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective

    Input Parameters:
+   comm - the communicator of the processes that will share the global vector
.   rank - rank of the process to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
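
/*
   End-to-end sketch (illustrative): create a redundant DM with three dofs
   owned by rank 0, push data from the global to the local representation,
   and clean up:

     DM  dm;
     Vec g,l;
     DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);
     DMCreateGlobalVector(dm,&g);
     DMCreateLocalVector(dm,&l);
     VecSet(g,1.0);
     DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);
     DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
     VecDestroy(&l);
     VecDestroy(&g);
     DMDestroy(&dm);
*/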
433