/* petsc/src/dm/impls/redundant/dmredundant.c (revision ce94432eddcd14845bc7e8083b7f8ea723b9bf7d) */
#include <petsc-private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscInt rank;                /* owner */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
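  /* Preallocate for every supported matrix type; only the call matching the actual type of *J takes effect, the others are no-ops */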
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

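  /* Each owned row is dense: every redundant dof couples to all N dofs */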
  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Redundant"
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMappingBlock(*gvec,ltogb);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
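  /* ADD_VALUES/MAX_VALUES reduce every rank's local copy onto the owning rank
     (using MPI_IN_PLACE on the owner when available); INSERT_VALUES assumes the
     local copies are consistent, so the owner simply keeps its own values. */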
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPI_SUM : MPI_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
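  /* The owning rank seeds its local vector from the global one, then a broadcast
     gives every rank a full copy of all N values */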
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Redundant"
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
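  /* Every rank addresses all N dofs locally, so the local-to-global mapping is the identity on 0..N-1 */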
  ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr         = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  ierr         = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
  dm->ltogmapb = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Redundant"
static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring_Redundant"
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
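  /* All dofs are mutually coupled (the Jacobian is dense), so each dof gets its own color */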
  ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine_Redundant"
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
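  /* The redundant space has no grid hierarchy: "refining" (and coarsening, below) just duplicates the DM */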
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_Redundant"
static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation_Redundant"
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
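  /* The coarse and fine spaces are identical, so the interpolation is the identity matrix */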
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of the process that owns the redundant degrees of freedom
-   N - total number of redundant degrees of freedom

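    Example (a minimal sketch, mirroring what DMRedundantCreate() does internally):
.vb
    DMCreate(comm,&dm);
    DMSetType(dm,DMREDUNDANT);
    DMRedundantSetSize(dm,rank,N);
    DMSetUp(dm);
.ve
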
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of the process that owns the redundant degrees of freedom (or NULL if not needed)
-   N - total number of redundant degrees of freedom (or NULL if not needed)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize_Redundant"
PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize_Redundant"
PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation the variables are all stored on a single MPI process (all other MPI processes hold
         no variables); in the local representation every MPI process stores all the variables (because all of them are
         needed for each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

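   A minimal usage sketch (here assuming rank 0 should own all N redundant values):
.vb
   DM  red;
   Vec g,l;
   DMRedundantCreate(PETSC_COMM_WORLD,0,N,&red);
   DMCreateGlobalVector(red,&g);                 /* N entries on rank 0, none elsewhere */
   DMCreateLocalVector(red,&l);                  /* N entries on every rank */
   DMGlobalToLocalBegin(red,g,INSERT_VALUES,l);  /* broadcast the values to all ranks */
   DMGlobalToLocalEnd(red,g,INSERT_VALUES,l);
.ve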

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMRedundantCreate(), DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscStrallocpy(VECSTANDARD,(char**)&dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "DMRedundantCreate"
/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - the rank that will own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

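    Example (a minimal sketch; here rank 0 owns all N values):
.vb
    DM dm;
    DMRedundantCreate(PETSC_COMM_WORLD,0,N,&dm);
.ve
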
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
486