xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 0c24e6a1e7d892dc3a9be60cfbbff2f1d2c50e14)
#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator that hold the parallel vectors of pc->comm */
  Vec          xdup,ydup;            /* parallel vectors that aggregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;           /* number of subcommunicators, i.e. of redundant solves */
} PC_Redundant;
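
/*
   Data flow of one application of this preconditioner (see PCApply_Redundant below):
   the global input x is scattered into xdup, which holds nsubcomm copies of x; xdup's
   local array is placed into the subcommunicator vector xsub; the inner pc is applied,
   giving ysub; ysub's local array is placed into ydup; and ydup is scattered back into
   the global output y.
*/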

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer,subviewer;
  PetscInt       color = red->psubcomm->color;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Redundant solver preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscViewerGetSubcomm(viewer,red->pc->comm,&subviewer);CHKERRQ(ierr);
    if (!color) { /* only view the first redundant pc */
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PCView(red->pc,subviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubcomm(viewer,red->pc->comm,&subviewer);CHKERRQ(ierr);
  } else if (isstring) { /* not yet tested */
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PCView(red->pc,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#include "include/private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = pc->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC of which the processors in each subcommunicator have a copy */
    subcomm = red->psubcomm->comm;
    ierr = PCCreate(subcomm,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = PCSetOptionsPrefix(red->pc,prefix);CHKERRQ(ierr);
    ierr = PCAppendOptionsPrefix(red->pc,"redundant_");CHKERRQ(ierr);

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    rstart_sub = pc->pmat->rmap.range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap.range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the duplicated communicator dupparent, not pc->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc(2*red->psubcomm->n*mlocal*sizeof(PetscInt),&idx1);CHKERRQ(ierr);
      idx2 = idx1 + red->psubcomm->n*mlocal;
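      /* each locally owned global entry i is sent to all n subcommunicators;
         the copy destined for subcommunicator k lives at global index i + k*m in xdup */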
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree(idx1);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the pmatrix set by the user is sequential, then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);

    /* tell the PC of the subcommunicator its operators */
    ierr = PCSetOperators(red->pc,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = PCSetOperators(red->pc,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);
  }
  ierr = PCSetUp(red->pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(x,red->xdup,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterEnd(x,red->xdup,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->ydup,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->ydup,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->pc) {ierr = PCDestroy(red->pc);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pcs","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                              nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG, "number of redundant pcs %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
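
/*
   A minimal usage sketch (illustrative only, not part of this file); it assumes a PC pc
   obtained, e.g., with KSPGetPC() from a KSP living on PETSC_COMM_WORLD:

     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);    four redundant parallel solves
*/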

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
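
/*
   A usage sketch (illustrative only): both scatters are supplied by the caller, created
   elsewhere with VecScatterCreate() so that "in" maps the global vector onto the
   duplicated layout and "out" maps it back, matching the layouts built in
   PCSetUp_Redundant() above:

     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
*/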

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  *innerpc = red->pc;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the sequential PC created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the sequential PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
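
/*
   A usage sketch (illustrative only): the inner PC exists once PCSetUp() has run, so it
   can then be retrieved and its type overridden, for instance replacing the default LU
   with Cholesky:

     PC innerpc;
     ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);
*/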

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
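
/*
   A usage sketch (illustrative only): after PCSetUp() the redundant operators can be
   inspected, e.g. to check their global dimensions:

     Mat      pmat;
     PetscInt M,N;
     ierr = PCRedundantGetOperators(pc,PETSC_NULL,&pmat);CHKERRQ(ierr);
     ierr = MatGetSize(pmat,&M,&N);CHKERRQ(ierr);
*/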

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
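
/*
   Example command-line usage (a sketch; "ex2" stands for any PETSc program that solves
   a linear system through a KSP):

     mpiexec -n 4 ./ex2 -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu

   With 4 processes and 4 redundant solves each copy is solved on a single process;
   options for the inner PC take the redundant_ prefix, as appended in
   PCSetUp_Redundant() above.
*/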

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNew(PC_Redundant,&red);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(pc,sizeof(PC_Redundant));CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END