#include <petsc-private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscInt rank;                /* owner */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
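
/* For example (an illustrative configuration, not fixed by this file): on a communicator of
   size 4 with rank=1 and N=5, rank 1 has n=5 while ranks 0, 2, and 3 have n=0, yet every
   rank holds all 5 dofs in its local representation. */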

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* The variables are densely coupled, so the owner holds a full N-by-N diagonal block;
     only the preallocation routine matching the matrix type takes effect, the rest are no-ops */
  ierr = MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

  /* Insert explicit zeros so the dense coupling pattern is assembled up front */
  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
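
/* This routine is reached through the type-independent DM interface; a minimal sketch of how
   a caller obtains the matrix (assuming a set-up DMREDUNDANT dm; names are illustrative):

     Mat J;
     ierr = DMCreateMatrix(dm,MATAIJ,&J);CHKERRQ(ierr);
     ...assemble and use J...
     ierr = MatDestroy(&J);CHKERRQ(ierr);
*/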

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Redundant"
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr = VecCreate(((PetscObject)dm)->comm,gvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*gvec,ltogb);CHKERRQ(ierr);
  ierr = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  /* The local vector is sequential and replicates all N dofs on every process */
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    /* Reduce the replicated local values from all processes onto the owning rank */
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined (PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      /* Fold the owner's own local contribution into the reduction buffer */
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else {
      source = (void*)lv;
    }
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPI_SUM : MPI_MAX,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    /* Only the owner has global storage (n=N there, n=0 elsewhere), so this copy is a no-op on non-owners */
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
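
/* A minimal sketch of the matching public calls (assuming a set-up DMREDUNDANT dm with local
   vector l and global vector g; names are illustrative):

     ierr = DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
     ierr = DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
*/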

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  /* All communication completes in DMLocalToGlobalBegin_Redundant(), so there is nothing to do here */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    /* The owner seeds its local copy, then broadcasts to every other process */
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
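
/* The corresponding public calls, sketched with the same illustrative names:

     ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
*/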

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  /* The broadcast in DMGlobalToLocalBegin_Redundant() is blocking, so this is a no-op */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Redundant"
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  /* Every process addresses all N dofs locally, so the local-to-global map is the identity on [0,N) */
  ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
  dm->ltogmapb = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Redundant"
static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring_Redundant"
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  /* All dofs are coupled to one another, so each dof must receive its own color */
  ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine_Redundant"
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmc)->comm;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
  /* Redundant variables are not refined; the "fine" DM has the same owner and size */
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_Redundant"
static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmf)->comm;
  ierr = MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  /* Redundant variables are not coarsened; the "coarse" DM has the same owner and size */
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation_Redundant"
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
  /* Coarse and fine spaces are identical, so the interpolation is the identity matrix */
  ierr = MatCreate(((PetscObject)dmc)->comm,P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
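
/* A minimal sketch of configuring the size by hand instead of via DMRedundantCreate()
   (rank 0 owning 3 dofs; the values are illustrative):

     DM dm;
     ierr = DMCreate(comm,&dm);CHKERRQ(ierr);
     ierr = DMSetType(dm,DMREDUNDANT);CHKERRQ(ierr);
     ierr = DMRedundantSetSize(dm,0,3);CHKERRQ(ierr);
     ierr = DMSetUp(dm);CHKERRQ(ierr);
*/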

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or PETSC_NULL)
-   N - total number of redundant degrees of freedom (or PETSC_NULL)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
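
/* Querying the configuration, as a sketch; either output may be PETSC_NULL if not wanted:

     PetscInt rank,N;
     ierr = DMRedundantGetSize(dm,&rank,&N);CHKERRQ(ierr);
*/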

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize_Redundant"
PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N = N;
  red->n = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize_Redundant"
PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI
         processes have no variables). In the local representation all the variables are stored on ALL the MPI processes
         (because they are all needed for each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMRedundantCreate(), DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
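
/* A minimal sketch of the DMCOMPOSITE use described above (illustrative; assumes a DM
   dmfield holding the mesh-based unknowns already exists):

     DM pack,dmred;
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,1,&dmred);CHKERRQ(ierr);
     ierr = DMCompositeCreate(PETSC_COMM_WORLD,&pack);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,dmfield);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,dmred);CHKERRQ(ierr);
*/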

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);
  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;
  ierr = PetscStrallocpy(VECSTANDARD,(char**)&dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "DMRedundantCreate"
/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
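
/* A fuller usage sketch (illustrative, not part of this file): two redundant dofs owned by
   rank 0 are summed onto the owner with ADD_VALUES and then replicated back to every
   process with INSERT_VALUES. The global vector has length 2 on rank 0 and 0 elsewhere;
   the local vector has length 2 on every rank.

     DM  dm;
     Vec g,l;
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,2,&dm);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);
     ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);
     ierr = VecSet(l,1.0);CHKERRQ(ierr);
     ierr = DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
     ierr = DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
     ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = VecDestroy(&l);CHKERRQ(ierr);
     ierr = VecDestroy(&g);CHKERRQ(ierr);
     ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/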