/* /petsc/src/dm/impls/redundant/dmredundant.c (revision bcaeba4d41d6ca6c6dc4189db20683073a9959ce) */
#include <petsc-private/dmimpl.h>   /*I      "petscdm.h"          I*/
#include <petscdmredundant.h>       /*I      "petscdmredundant.h" I*/
#include <petscmat.h>               /*I      "petscmat.h"         I*/

typedef struct {
  PetscInt rank;                /* owner */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
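/* The matrix of a redundant DM is dense: every dof couples to every other dof,
   and all rows are stored on the owning rank (which has n=N local rows; all
   other ranks have n=0).  Preallocation is issued for the AIJ and BAIJ
   variants since the caller picks the type at run time. */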
#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Redundant"
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr = VecCreate(((PetscObject)dm)->comm,gvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*gvec,ltogb);CHKERRQ(ierr);
  ierr = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

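/* Local-to-global: with ADD_VALUES or MAX_VALUES every rank's local copy is
   reduced onto the owning rank (MAX_VALUES only for real scalars, see the
   PETSC_USE_COMPLEX guard below); with INSERT_VALUES the owner simply copies
   its local values into the global vector (non-owners have n=0). */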
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else {
      source = (void*)lv;
    }
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPI_SUM : MPI_MAX,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

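/* Global-to-local: the owning rank copies its n=N global values into its
   local vector, then broadcasts them so every rank ends up with all N dofs. */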
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

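/* Every rank's local space covers all N global dofs, so the local-to-global
   mapping is the identity; the same map doubles as the block map. */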
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Redundant"
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
  dm->ltogmapb = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Redundant"
static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

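/* Since the coupling is dense, every dof needs its own color; the number of
   colors therefore equals the global size N. */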
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring_Redundant"
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

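/* Refining or coarsening a redundant DM does not change its size: both simply
   create a new DMREDUNDANT with the same owner and number of dofs. */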
#undef __FUNCT__
#define __FUNCT__ "DMRefine_Redundant"
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmc)->comm;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_Redundant"
static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmf)->comm;
  ierr = MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

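/* Coarse and fine redundant DMs carry identical dofs, so interpolation is the
   identity matrix. */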
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation_Redundant"
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"Communicators of coarse and fine DMs do not match");
  if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(((PetscObject)dmc)->comm,P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of the process to own the redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
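
/* A staged-construction sketch (illustrative only), equivalent to calling
   DMRedundantCreate(); here rank 0 owns 3 redundant dofs:

     DM dm;
     ierr = DMCreate(PETSC_COMM_WORLD,&dm);CHKERRQ(ierr);
     ierr = DMSetType(dm,DMREDUNDANT);CHKERRQ(ierr);
     ierr = DMRedundantSetSize(dm,0,3);CHKERRQ(ierr);
     ierr = DMSetUp(dm);CHKERRQ(ierr);
*/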

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of the process owning the redundant degrees of freedom (pass PETSC_NULL if not needed)
-   N - total number of redundant degrees of freedom (pass PETSC_NULL if not needed)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
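
/* A minimal sketch (illustrative only): given a DM dm of type DMREDUNDANT,
   either output may be PETSC_NULL if it is not needed:

     PetscInt N;
     ierr = DMRedundantGetSize(dm,PETSC_NULL,&N);CHKERRQ(ierr);
*/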

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize_Redundant"
PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize_Redundant"
PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of densely coupled global variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other
         MPI processes have no variables); in the local representation all the variables are stored on ALL the MPI
         processes (because they are all needed for each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMRedundantCreate(), DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
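
/* A sketch of the typical use inside a DMCOMPOSITE (illustrative only; the
   mesh DM "dmmesh" is a hypothetical stand-in), e.g. carrying two globally
   coupled continuation parameters alongside a mesh:

     DM red,pack;
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,2,&red);CHKERRQ(ierr);
     ierr = DMCompositeCreate(PETSC_COMM_WORLD,&pack);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,dmmesh);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,red);CHKERRQ(ierr);
*/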

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);
  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;
  ierr = PetscStrallocpy(VECSTANDARD,(char**)&dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "DMRedundantCreate"
/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for densely coupled global variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the communicator of the processes that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
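
/* A minimal end-to-end sketch (illustrative only): rank 0 owns N=4 dofs, the
   global vector is filled, and a global-to-local broadcast leaves every rank
   with its own full copy of all 4 values:

     DM  dm;
     Vec g,l;
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,4,&dm);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);
     ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);
     ierr = VecSet(g,1.0);CHKERRQ(ierr);
     ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = VecDestroy(&g);CHKERRQ(ierr);
     ierr = VecDestroy(&l);CHKERRQ(ierr);
     ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/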