xref: /petsc/src/dm/impls/redundant/dmredundant.c (revision 8cdbd7576f676fd2f0cf4ce91c9fbf7f9b0d4574)
1 #include <private/dmimpl.h>     /*I      "petscdm.h"          I*/
2 #include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/
3 #include <petscmat.h>           /*I      "petscmat.h"         I*/
4 
/* Implementation context for DMREDUNDANT: N globally coupled degrees of
   freedom stored redundantly (full length N) on every rank, with a single
   designated owner rank holding the authoritative global copy. */
typedef struct  {
  PetscInt rank;                /* owner */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
10 
11 #undef __FUNCT__
12 #define __FUNCT__ "DMCreateMatrix_Redundant"
13 static PetscErrorCode DMCreateMatrix_Redundant(DM dm,const MatType mtype,Mat *J)
14 {
15   DM_Redundant           *red = (DM_Redundant*)dm->data;
16   PetscErrorCode         ierr;
17   ISLocalToGlobalMapping ltog,ltogb;
18 
19   PetscFunctionBegin;
20   ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
21   ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
22   ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
23   ierr = MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);CHKERRQ(ierr);
24   ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);CHKERRQ(ierr);
25   ierr = MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
26   ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
27 
28   ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
29   ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
30   ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
31   ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "DMDestroy_Redundant"
37 static PetscErrorCode DMDestroy_Redundant(DM dm)
38 {
39   PetscErrorCode ierr;
40 
41   PetscFunctionBegin;
42   ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);CHKERRQ(ierr);
43   ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);CHKERRQ(ierr);
44   PetscFunctionReturn(0);
45 }
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "DMCreateGlobalVector_Redundant"
49 static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
50 {
51   PetscErrorCode         ierr;
52   DM_Redundant           *red = (DM_Redundant*)dm->data;
53   ISLocalToGlobalMapping ltog,ltogb;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
57   PetscValidPointer(gvec,2);
58   *gvec = 0;
59   ierr = VecCreate(((PetscObject)dm)->comm,gvec);CHKERRQ(ierr);
60   ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
61   ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
62   ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
63   ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
64   ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
65   ierr = VecSetLocalToGlobalMappingBlock(*gvec,ltog);CHKERRQ(ierr);
66   ierr = PetscObjectCompose((PetscObject)*gvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
67   PetscFunctionReturn(0);
68 }
69 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
/* Create the local vector: a sequential vector of the full global size N on
   every rank (all dofs are replicated locally), tagged with its DM. */
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  /* Local vectors are per-rank objects, hence PETSC_COMM_SELF. */
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*lvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
87 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
/* Gather each rank's local copy (length N) onto the owning rank's global
   vector.  ADD_VALUES sums contributions across ranks, MAX_VALUES takes the
   pointwise maximum (real scalars only), INSERT_VALUES copies the owner's
   own local values into its global storage (no communication). */
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void *source;
    PetscScalar *buffer;
    PetscInt i;
    if (rank == red->rank) {
#if defined (PETSC_HAVE_MPI_IN_PLACE)
      /* Reduce directly into the owner's global array. */
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      /* No MPI_IN_PLACE support: stage the owner's combined values in a
         temporary send buffer instead. */
      ierr = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      /* Fold the owner's existing global values together with its local
         contribution before the cross-rank reduction. */
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      /* NOTE(review): MAX_VALUES only fills the buffer for real scalars, yet
         the MPI_MAX reduction below is still issued in complex builds —
         confirm MAX_VALUES is rejected/unused with complex scalars. */
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else {
      /* Non-owners send their local array; MPI_Reduce does not modify the
         send buffer, so casting away const here is safe. */
      source = (void*)lv;
    }
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES)?MPI_SUM:MPI_MAX,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
  /* red->n is N on the owner and 0 elsewhere, so only the owner copies. */
  ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
  break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
138 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
/* All communication completes synchronously in
   DMLocalToGlobalBegin_Redundant(), so the end stage is a no-op. */
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
146 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
/* Scatter the owner's global values into every rank's local vector.
   Only INSERT_VALUES is supported: the owner seeds its local array from the
   global array, then a broadcast makes all local copies identical. */
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    /* red->n is nonzero only on the owning rank, the broadcast root. */
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
170 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
/* All communication completes synchronously in
   DMGlobalToLocalBegin_Redundant(), so the end stage is a no-op. */
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
178 
179 #undef __FUNCT__
180 #define __FUNCT__ "DMSetUp_Redundant"
181 static PetscErrorCode DMSetUp_Redundant(DM dm)
182 {
183   PetscErrorCode ierr;
184   DM_Redundant   *red = (DM_Redundant*)dm->data;
185   PetscInt       i,*globals;
186 
187   PetscFunctionBegin;
188   ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
189   for (i=0; i<red->N; i++) globals[i] = i;
190   ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
191   ierr = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
192   dm->ltogmapb = dm->ltogmap;
193   PetscFunctionReturn(0);
194 }
195 
196 #undef __FUNCT__
197 #define __FUNCT__ "DMView_Redundant"
198 static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
199 {
200   PetscErrorCode ierr;
201   DM_Redundant   *red = (DM_Redundant*)dm->data;
202   PetscBool      iascii;
203 
204   PetscFunctionBegin;
205   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
206   if (iascii) {
207     ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
208   }
209   PetscFunctionReturn(0);
210 }
211 
212 #undef __FUNCT__
213 #define __FUNCT__ "DMCreateColoring_Redundant"
214 static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
215 {
216   DM_Redundant   *red = (DM_Redundant*)dm->data;
217   PetscErrorCode ierr;
218   PetscInt i,nloc;
219   ISColoringValue *colors;
220 
221   PetscFunctionBegin;
222   switch (ctype) {
223   case IS_COLORING_GLOBAL:
224     nloc = red->n;
225     break;
226   case IS_COLORING_GHOSTED:
227     nloc = red->N;
228     break;
229   default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
230   }
231   ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
232   for (i=0; i<nloc; i++) colors[i] = i;
233   ierr = ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);CHKERRQ(ierr);
234   ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
235   PetscFunctionReturn(0);
236 }
237 
238 #undef __FUNCT__
239 #define __FUNCT__ "DMRefine_Redundant"
240 static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
241 {
242   PetscErrorCode ierr;
243   PetscMPIInt flag;
244   DM_Redundant *redc = (DM_Redundant*)dmc->data;
245 
246   PetscFunctionBegin;
247   ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag); CHKERRQ(ierr);
248   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
249   ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
250   PetscFunctionReturn(0);
251 }
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "DMCoarsen_Redundant"
255 static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
256 {
257   PetscErrorCode ierr;
258   PetscMPIInt flag;
259   DM_Redundant *redf = (DM_Redundant*)dmf->data;
260 
261   PetscFunctionBegin;
262   ierr = MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag); CHKERRQ(ierr);
263   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
264   ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
265   PetscFunctionReturn(0);
266 }
267 
268 #undef __FUNCT__
269 #define __FUNCT__ "DMCreateInterpolation_Redundant"
270 static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
271 {
272   PetscErrorCode ierr;
273   DM_Redundant   *redc = (DM_Redundant*)dmc->data;
274   DM_Redundant   *redf = (DM_Redundant*)dmf->data;
275   PetscMPIInt    flag;
276   PetscInt       i,rstart,rend;
277 
278   PetscFunctionBegin;
279   ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag); CHKERRQ(ierr);
280   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
281   if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
282   if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
283   ierr = MatCreate(((PetscObject)dmc)->comm,P);CHKERRQ(ierr);
284   ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
285   ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
286   ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
287   ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
288   ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
289   for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
290   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
291   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
292   if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
293   PetscFunctionReturn(0);
294 }
295 
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  /* Dispatch to the type-specific implementation if one is composed. */
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
324 
#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or PETSC_NULL)
-   N - total number of redundant degrees of freedom (or PETSC_NULL)

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  /* Dispatch to the type-specific implementation (errors if not composed). */
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
353 
354 EXTERN_C_BEGIN
355 #undef __FUNCT__
356 #define __FUNCT__ "DMRedundantSetSize_Redundant"
357 PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
358 {
359   DM_Redundant   *red = (DM_Redundant*)dm->data;
360   PetscErrorCode ierr;
361   PetscMPIInt    myrank;
362 
363   PetscFunctionBegin;
364   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);CHKERRQ(ierr);
365   red->rank = rank;
366   red->N = N;
367   red->n = (myrank == rank) ? N : 0;
368   PetscFunctionReturn(0);
369 }
370 
371 #undef __FUNCT__
372 #define __FUNCT__ "DMRedundantGetSize_Redundant"
373 PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
374 {
375   DM_Redundant   *red = (DM_Redundant*)dm->data;
376 
377   PetscFunctionBegin;
378   if (rank) *rank = red->rank;
379   if (N)    *N = red->N;
380   PetscFunctionReturn(0);
381 }
382 EXTERN_C_END
383 
384 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
/* Type constructor registered for DMREDUNDANT: allocates the implementation
   context and installs the type-specific method table. */
PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);
  dm->ops->setup              = DMSetUp_Redundant;
  dm->ops->view               = DMView_Redundant;
  dm->ops->createglobalvector = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector  = DMCreateLocalVector_Redundant;
  dm->ops->getmatrix          = DMCreateMatrix_Redundant;
  dm->ops->destroy            = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend   = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend   = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine             = DMRefine_Redundant;
  dm->ops->coarsen            = DMCoarsen_Redundant;
  dm->ops->getinterpolation   = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring        = DMCreateColoring_Redundant;
  ierr = PetscStrallocpy(VECSTANDARD,&dm->vectype);CHKERRQ(ierr);
  /* Compose the set/get-size methods so the public API can dispatch to this
     implementation without direct linkage. */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
416 EXTERN_C_END
417 
418 #undef __FUNCT__
419 #define __FUNCT__ "DMRedundantCreate"
420 /*@C
421     DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables
422 
423     Collective on MPI_Comm
424 
425     Input Parameter:
426 +   comm - the processors that will share the global vector
427 .   rank - rank to own the redundant values
428 -   N - total number of degrees of freedom
429 
430     Output Parameters:
431 .   red - the redundant DM
432 
433     Level: advanced
434 
435 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM()
436 
437 @*/
438 PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
439 {
440   PetscErrorCode ierr;
441 
442   PetscFunctionBegin;
443   PetscValidPointer(dm,2);
444   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
445   ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
446   ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
447   ierr = DMSetUp(*dm);CHKERRQ(ierr);
448   PetscFunctionReturn(0);
449 }
450