/* /petsc/src/dm/impls/redundant/dmredundant.c (revision 609bdbee21ea3be08735c64dbe00a9ab27759925) */
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = NULL;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = NULL;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

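/* Local-to-global for DMREDUNDANT: with ADD_VALUES/MAX_VALUES the local copies from all ranks are
   reduced onto the owning rank with MPI_Reduce(); with INSERT_VALUES the owning rank simply copies
   its local values into the (owner-only) global storage. */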
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

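/* Global-to-local for DMREDUNDANT: the owning rank's values are broadcast with MPI_Bcast() so that
   every rank's local vector ends up holding all N redundant values. */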
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

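    Example Usage:
    A minimal sketch, equivalent to calling DMRedundantCreate(); error checking is omitted and the
    owning rank 0 and size 5 are purely illustrative.
.vb
    DM dm;

    DMCreate(PETSC_COMM_WORLD,&dm);
    DMSetType(dm,DMREDUNDANT);
    DMRedundantSetSize(dm,0,5);   /* rank 0 owns the 5 redundant dofs */
    DMSetUp(dm);
.ve
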
.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveMPIInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process owning the redundant degrees of freedom (pass NULL if not needed)
-   N - total number of redundant degrees of freedom (pass NULL if not needed)

    Level: advanced

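    Example Usage:
    A minimal sketch (error checking omitted); dm is assumed to already be a DMREDUNDANT.
.vb
    PetscMPIInt owner;
    PetscInt    N;

    DMRedundantGetSize(dm,&owner,&N);   /* pass NULL for any output that is not needed */
.ve
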
.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

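   Example Usage:
   A minimal sketch of combining a DMREDUNDANT with a DMCOMPOSITE (error checking omitted; the single
   continuation parameter on rank 0 and the names red and pack are illustrative):
.vb
   DM  red,pack;
   Vec X;

   DMRedundantCreate(PETSC_COMM_WORLD,0,1,&red);   /* one dof, stored on rank 0 in the global vector */
   DMCompositeCreate(PETSC_COMM_WORLD,&pack);
   DMCompositeAddDM(pack,red);                     /* typically combined with DMs for the PDE fields */
   DMCreateGlobalVector(pack,&X);
.ve
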
  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

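    Example Usage:
    A minimal sketch (error checking omitted); the owning rank 0 and size 3 are illustrative.
.vb
    DM  dm;
    Vec g,l;

    DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);
    DMCreateGlobalVector(dm,&g);                  /* 3 entries on rank 0, 0 entries elsewhere */
    DMCreateLocalVector(dm,&l);                   /* 3 entries on every rank */
    VecSet(g,1.0);
    DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);   /* broadcast the owner's values to all ranks */
    DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
.ve
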
.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}