/* xref: /petsc/src/dm/impls/redundant/dmredundant.c (revision d2fd7bfc6f0fd2e1d083decbb7cc7d77e16824f0) */
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct  {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
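
/*
  A minimal usage sketch (hypothetical names, not part of this file): the matrix created above
  is an N x N matrix whose rows all live on the owning rank and whose nonzero pattern is dense,
  since every redundant dof is assumed to couple with every other one.

    Mat J;
    DMCreateMatrix(dm,&J);                   // dm is assumed to be a DMREDUNDANT
    // ... set entries with MatSetValues()/MatSetValuesLocal(), then assemble ...
    MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
    MatDestroy(&J);
*/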

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscArraycpy(gv,lv,red->n);CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
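
/*
  A minimal sketch of the intended semantics (hypothetical variable names; dm is assumed to be
  a DMREDUNDANT): every rank contributes its full local copy, and the reduction above delivers
  the combined result to the owning rank only.

    Vec l,g;
    DMCreateLocalVector(dm,&l);               // length N on every rank
    DMCreateGlobalVector(dm,&g);              // length N on the owning rank, 0 elsewhere
    // ... each rank fills l ...
    DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);  // owner receives the elementwise sum over all ranks
    DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);
*/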

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscArraycpy(lv,gv,red->n);CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
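
/*
  Conversely, the broadcast above makes every rank's local vector a copy of the owner's values.
  A minimal sketch (hypothetical names, continuing the example above):

    DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);  // after this, all ranks hold identical values
    DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
*/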

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
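
/*
  A minimal sketch (hypothetical names): coarsening or refining a DMREDUNDANT produces another
  redundant DM of the same size, so the interpolation assembled above is simply the identity.

    DM  dmc;
    Mat P;
    DMCoarsen(dmf,MPI_COMM_NULL,&dmc);
    DMCreateInterpolation(dmc,dmf,&P,NULL);  // P is the N x N identity
*/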

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on dm

    Input Parameters:
+   dm - redundant DM
.   rank - rank of the process that will own the redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveMPIInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of the process that owns the redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* the local-to-global mapping is set up here */
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingDestroy(&dm->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense, globally coupled variables.
         In the global representation of a vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
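
/*
  A minimal sketch of the typical use inside a DMCOMPOSITE (hypothetical names; dmother stands
  for any other DM already created on the same communicator):

    DM dmred,pack;
    DMRedundantCreate(PETSC_COMM_WORLD,0,2,&dmred);  // 2 redundant parameters owned by rank 0
    DMCompositeCreate(PETSC_COMM_WORLD,&pack);
    DMCompositeAddDM(pack,dmother);
    DMCompositeAddDM(pack,dmred);
    DMSetUp(pack);
*/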

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense, globally coupled variables

    Collective

    Input Parameters:
+   comm - the communicator of the processes that will share the global vector
.   rank - rank of the process that will own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
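
/*
  A minimal end-to-end sketch (hypothetical names):

    DM  dm;
    Vec g,l;
    DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);  // 3 redundant dofs owned by rank 0
    DMCreateGlobalVector(dm,&g);
    DMCreateLocalVector(dm,&l);
    // ... work with the vectors ...
    VecDestroy(&l);
    VecDestroy(&g);
    DMDestroy(&dm);
*/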