#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors on the duplicated communicator that aggregate xsub or ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
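
/*
   Data flow for one application of the preconditioner (see PCApply_Redundant below):

     x --scatterin--> xdup --(place local array)--> xsub --PCApply--> ysub
       --(place local array)--> ydup --scatterout--> y

   Each of the nsubcomm subgroups receives a full copy of x and solves the
   entire problem redundantly; only one copy is scattered back into y.
*/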

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer,subviewer;
  PetscInt       color = red->psubcomm->color;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    if (!color) { /* only view first redundant pc */
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
  } else if (isstring) { /* not yet tested */
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = KSPView(red->ksp,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#include "include/private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  KSP            subksp;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&subksp);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,subksp);CHKERRQ(ierr);
    ierr = KSPSetType(subksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(subksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(subksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(subksp,"redundant_");CHKERRQ(ierr);
    red->ksp = subksp;

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    rstart_sub = pc->pmat->rmap.range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap.range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the duplicated parent communicator dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc(2*red->psubcomm->n*mlocal*sizeof(PetscInt),&idx1);CHKERRQ(ierr);
      idx2 = idx1 + red->psubcomm->n*mlocal;
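      /* idx1 repeats this process's owned indices of vec once per subcommunicator;
         idx2 maps the k-th copy into block k of xdup, which concatenates
         red->psubcomm->n full copies of the global vector */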
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree(idx1);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the preconditioner matrix set by the user is sequential, we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner within each subcommunicator */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->ksp) {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant solves","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG,"number of redundant solves %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
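
/*
   Example usage (a minimal sketch, assuming a KSP object "ksp" whose operators
   have already been set; error checking abbreviated):

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
      ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);

   With 64 MPI processes this yields 4 parallel solves, each on 16 processes.
*/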

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
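
/*
   Example usage (a minimal sketch, assuming global vectors "x" and "xred" of
   compatible sizes already exist; nlocal, first_in and first_out are
   placeholders for an application-specific index mapping):

      IS         isin,isout;
      VecScatter in,out;
      ierr = ISCreateStride(comm,nlocal,first_in,1,&isin);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,nlocal,first_out,1,&isout);CHKERRQ(ierr);
      ierr = VecScatterCreate(x,isin,xred,isout,&in);CHKERRQ(ierr);
      ierr = VecScatterCreate(xred,isout,x,isin,&out);CHKERRQ(ierr);
      ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
*/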

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  *innerpc = red->pc;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; it runs on each
     subcommunicator (and is sequential when each subcommunicator has a single process).

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the PC of the subcommunicator

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
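
/*
   Example usage (a minimal sketch; switches the inner factorization from the
   default LU to Cholesky):

      PC innerpc;
      ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
      ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);
*/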

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used on each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
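
/*
   Example usage (a minimal sketch; inspects the gathered matrix after
   PCSetUp() has been called):

      Mat rmat;
      ierr = PCRedundantGetOperators(pc,&rmat,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatView(rmat,PETSC_VIEWER_STDOUT_(((PetscObject)rmat)->comm));CHKERRQ(ierr);
*/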

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioners can be set with -redundant_pc_xxx, and for the inner KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
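
/*
   Example command-line usage (a sketch, assuming an executable ./app that
   solves a linear system with KSP on 64 MPI processes):

      mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu

   This performs 4 redundant parallel solves, each on 16 processes, with an LU
   factorization inside each subgroup.
*/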

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END