xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision db268bfa9c46c24c134d4e2e1d78f27d60e1123e)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines a "solve the problem redundantly on each subgroup of processor" preconditioner.
5 */
6 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
7 #include "petscksp.h"
8 
/* Private context for the PCREDUNDANT preconditioner.  The global
   communicator pc->comm is split into nsubcomm subcommunicators; each
   subgroup redundantly solves the entire problem with its own inner PC. */
typedef struct {
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of pc->comm */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;       /* if true, gather pc->pmat onto each subcommunicator with MatGetRedundantMatrix() */
  PetscSubcomm psubcomm;             /* the splitting of pc->comm into subcommunicators; NULL until PCSetUp_Redundant() runs */
  PetscInt     nsubcomm;           /* num of data structure PetscSubcomm */
} PC_Redundant;
19 
20 #undef __FUNCT__
21 #define __FUNCT__ "PCView_Redundant"
22 static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
23 {
24   PC_Redundant   *red = (PC_Redundant*)pc->data;
25   PetscErrorCode ierr;
26   PetscMPIInt    rank;
27   PetscTruth     iascii,isstring;
28   PetscViewer    sviewer,subviewer;
29   PetscInt       color = red->psubcomm->color;
30 
31   PetscFunctionBegin;
32   ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
33   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
34   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
35   if (iascii) {
36     ierr = PetscViewerASCIIPrintf(viewer,"  Redundant solver preconditioner: First PC (color=0) follows\n");CHKERRQ(ierr);
37     ierr = PetscViewerGetSubcomm(viewer,red->pc->comm,&subviewer);CHKERRQ(ierr);
38     if (!color) { /* only view first redundant pc */
39       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
40       ierr = PCView(red->pc,subviewer);CHKERRQ(ierr);
41       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
42     }
43     ierr = PetscViewerRestoreSubcomm(viewer,red->pc->comm,&subviewer);CHKERRQ(ierr);
44   } else if (isstring) { /* not test it yet! */
45     ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
46     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
47     if (!rank) {
48       ierr = PCView(red->pc,sviewer);CHKERRQ(ierr);
49     }
50     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #include "include/private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
/*
   PCSetUp_Redundant - Builds everything needed to apply the preconditioner
   redundantly: on first call it splits pc->comm into nsubcomm
   subcommunicators, creates the inner PC (default PCLU, options prefix
   "redundant_"), the work vectors, and the two scatters; on every call it
   (re)gathers the parallel matrix onto each subcommunicator and sets up
   the inner PC.
*/
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = pc->comm,subcomm;
  Vec            vec;
  PetscInt       mlocal_sub;
  PetscMPIInt    subsize,subrank;
  PetscInt       rstart_sub,rend_sub,mloc_sub;
  const char     *prefix;

  PetscFunctionBegin;
  /* template vector compatible with pc->pmat; used for sizes and scatter creation */
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    subcomm = red->psubcomm->comm;
    ierr = PCCreate(subcomm,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    /* inner PC inherits the outer prefix plus "redundant_", so e.g.
       -redundant_pc_type controls it from the options database */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = PCSetOptionsPrefix(red->pc,prefix);CHKERRQ(ierr);
    ierr = PCAppendOptionsPrefix(red->pc,"redundant_");CHKERRQ(ierr);
    ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    /* NOTE(review): this indexing assumes PetscSubcommCreate() assigns
       global ranks to subcomms in round-robin order, i.e. subcomm rank r
       corresponds to global ranks {n*r, ..., n*(r+1)-1} -- TODO confirm
       against the PetscSubcomm implementation */
    rstart_sub = pc->pmat->rmap.range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap.range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be place into it.
       Note: we use communicator dupcomm, not pc->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters (skipped if the user supplied them via PCRedundantSetScatter) */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;
      /* one allocation for both index sets; idx2 aliases the second half */
      ierr = PetscMalloc(2*red->psubcomm->n*mlocal*sizeof(PetscInt),&idx1);CHKERRQ(ierr);
      idx2 = idx1 + red->psubcomm->n*mlocal;
      j = 0;
      /* scatterin duplicates each locally owned entry i of the global vector
         into every one of the n stacked copies inside xdup (offset m*k) */
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      /* scatterout pulls this process's slice back out of its own subgroup's
         copy (copy number = psubcomm->color, hence offset color*m) */
      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree(idx1);CHKERRQ(ierr); /* frees idx2 as well (same allocation) */
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices; a fresh one is built below with MAT_INITIAL_MATRIX */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      /* same nonzero pattern: reuse the gathered matrix in place */
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into processors of a subcomminicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);

    /* tell PC of the subcommunicator its operators */
    ierr = PCSetOperators(red->pc,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    /* sequential case: the inner PC works directly on the user's operators */
    ierr = PCSetOperators(red->pc,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);
  ierr = PCSetUp(red->pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
/*
   PCApply_Redundant - Applies the redundant preconditioner: y = M^{-1} x.

   The global vector x is scattered into xdup (which stacks psubcomm->n
   copies of x on the duplicated communicator); each subgroup applies its
   inner PC to its own copy, and each process pulls its slice of the
   result back into the global vector y.

   The xsub/ydup shells were created without arrays of their own, so each
   VecPlaceArray() below borrows storage from xdup/ysub and MUST be undone
   with VecResetArray() before the owner's VecRestoreArray() -- the
   statement order here is load-bearing.
*/
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup: every subgroup receives a full copy of x */
  ierr = VecScatterBegin(x,red->xdup,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterEnd(x,red->xdup,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);

  /* place xdup's local array into xsub (aliasing; no copy) */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor group */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup (same aliasing trick, reverse direction) */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y: each process takes its slice from its subgroup's copy */
  ierr = VecScatterBegin(red->ydup,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->ydup,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
209 
210 #undef __FUNCT__
211 #define __FUNCT__ "PCDestroy_Redundant"
212 static PetscErrorCode PCDestroy_Redundant(PC pc)
213 {
214   PC_Redundant   *red = (PC_Redundant*)pc->data;
215   PetscErrorCode ierr;
216 
217   PetscFunctionBegin;
218   if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
219   if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
220   if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
221   if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
222   if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
223   if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
224   if (red->pmats) {
225     ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
226   }
227   if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
228   if (red->pc) {ierr = PCDestroy(red->pc);CHKERRQ(ierr);}
229   ierr = PetscFree(red);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "PCSetFromOptions_Redundant"
235 static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
236 {
237   PetscErrorCode ierr;
238   PC_Redundant   *red = (PC_Redundant*)pc->data;
239 
240   PetscFunctionBegin;
241   ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
242   ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
243   ierr = PetscOptionsTail();CHKERRQ(ierr);
244   PetscFunctionReturn(0);
245 }
246 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
/* Type-specific implementation behind PCRedundantSetNumber(); records the
   requested number of subcommunicators.  Takes effect at the next PCSetUp();
   validation (nreds > 0) is done by the public wrapper. */
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END
259 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts
   (i.e. the number of subcommunicators the problem is solved on).

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; must be positive

   Notes:
   Following the usual PETSc convention, the call is silently ignored if
   pc is not of type PCREDUNDANT.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  /* dispatch to the type-specific implementation, if any */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
288 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
/* Type-specific implementation behind PCRedundantSetScatter(); stores the
   user-supplied in/out scatters, replacing (and releasing) any previous
   ones.  Each new scatter is referenced BEFORE the old one is destroyed so
   that passing the already-stored scatter back in is safe. */
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END
307 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Notes:
   If this is not called, PCSetUp() builds default scatters.  The call is
   silently ignored if pc is not of type PCREDUNDANT.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  /* dispatch to the type-specific implementation, if any */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
340 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
/* Type-specific implementation behind PCRedundantGetPC(); returns a
   borrowed reference to the inner PC (NULL before PCSetUp() has run).
   The caller must NOT destroy it. */
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  *innerpc = red->pc;
  PetscFunctionReturn(0);
}
EXTERN_C_END
353 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; this is the
   preconditioner used on each subcommunicator (sequential in the default
   case of one process per subcommunicator).

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the inner PC; a borrowed reference (do not destroy), NULL
   until PCSetUp() has been called

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  /* dispatch to the type-specific implementation, if any */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
384 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
/* Type-specific implementation behind PCRedundantGetOperators(); returns
   borrowed references to the gathered matrix (the same Mat serves as both
   operator and preconditioner matrix here).  NULL before PCSetUp(), or
   when useparallelmat is false. */
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END
398 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used
   by the redundant solve on each subcommunicator (sequential in the default
   case of one process per subcommunicator)

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix; a borrowed reference (do not destroy); either may be PETSC_NULL if not wanted
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  /* dispatch to the type-specific implementation, if any */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
431 
432 /* -------------------------------------------------------------------------------------*/
433 /*MC
434      PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors
435 
436      Options for the redundant preconditioners can be set with -redundant_pc_xxx
437 
438   Options Database:
.  -pc_redundant_number - number of redundant solves, i.e. number of sub communicators to use
440 
441    Level: intermediate
442 
443 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
444            PCRedundantGetPC(), PCRedundantGetOperators()
445 M*/
446 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
/* PCCreate_Redundant - Creates the PCREDUNDANT context: allocates the
   private data, sets defaults, installs the function table, and composes
   the type-specific setter/getter implementations so the public
   PCRedundantXxx() wrappers can find them. */
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNew(PC_Redundant,&red);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(pc,sizeof(PC_Redundant));CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  /* default: one redundant solve per process, and gather the parallel matrix */
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0; /* transpose application not implemented */
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
481