
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each process */
  Vec                xsub,ysub;            /* vectors of a subcommunicator that hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each process group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    PetscCall(KSPGetPC(red->ksp,&pc));
    PetscCall(PCFactorSetShiftType(pc,shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp,subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer," Redundant solver preconditioner"));
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm,&size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc,&ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm,&size));
      if (size > 1) {
        PetscBool foundpack,issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats,MATMPISBAIJ,&issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_CHOLESKY,&foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp,KSPGMRES));
          PetscCall(PCSetType(red->pc,PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc,NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp,red->pmats,red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats,&red->xsub,&red->ysub));

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats,&mloc_sub,NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        PetscCall(MatCreateVecs(pc->pmat,&x,NULL));
        PetscCall(VecGetSize(x,&M));
        PetscCall(VecGetOwnershipRange(x,&mstart,&mend));
        mlocal = mend - mstart;
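        /* Each of the psubcomm->n redundant copies needs every locally owned entry of x:
           idx1 repeats the owned global indices of x once per copy, while idx2 offsets the
           k-th copy by k*M so that the copies are stacked contiguously in xdup. */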
        PetscCall(PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2));
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        PetscCall(ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1));
        PetscCall(ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2));
        PetscCall(VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* The implementation below works for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        PetscCall(ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*M,1,&is1));
        PetscCall(ISCreateStride(comm,mlocal,mstart,1,&is2));
        PetscCall(VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1,idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp,pc->mat,pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats));
      PetscCall(KSPSetOperators(red->ksp,red->pmats,red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp,pc->mat,pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) {
    PetscCall(KSPSetFromOptions(red->ksp));
  }
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp,x,y));
    PetscCall(KSPCheckSolve(red->ksp,pc,y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD));

  /* place xdup's local array into xsub to avoid a copy */
  PetscCall(VecGetArray(red->xdup,&array));
  PetscCall(VecPlaceArray(red->xsub,(const PetscScalar*)array));

  /* apply the preconditioner on each subcommunicator */
  PetscCall(KSPSolve(red->ksp,red->xsub,red->ysub));
  PetscCall(KSPCheckSolve(red->ksp,pc,red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup,&array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub,&array));
  PetscCall(VecPlaceArray(red->ydup,(const PetscScalar*)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub,&array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp,x,y));
    PetscCall(KSPCheckSolve(red->ksp,pc,y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD));

  /* place xdup's local array into xsub to avoid a copy */
  PetscCall(VecGetArray(red->xdup,&array));
  PetscCall(VecPlaceArray(red->xsub,(const PetscScalar*)array));

  /* apply the transposed preconditioner on each subcommunicator */
  PetscCall(KSPSolveTranspose(red->ksp,red->xsub,red->ysub));
  PetscCall(KSPCheckSolve(red->ksp,pc,red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup,&array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub,&array));
  PetscCall(VecPlaceArray(red->ydup,(const PetscScalar*)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub,&array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscOptionsHead(PetscOptionsObject,"Redundant options"));
  PetscCall(PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,NULL));
  PetscCall(PetscOptionsTail());
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

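   Example Usage:
   A minimal sketch; the KSP named ksp is assumed to exist already:
.vb
   PC pc;
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCREDUNDANT));
   PetscCall(PCRedundantSetNumber(pc, 4)); /* 4 redundant solves, each on size/4 processes */
.ve
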
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscCheckFalse(nredundant <= 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));
  red->scatterin  = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

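   Example Usage:
   A sketch, assuming the two scatters were built beforehand with VecScatterCreate()
   to map a global vector into the duplicated layout and back:
.vb
   VecScatter in, out;
   /* ... create in and out with VecScatterCreate() ... */
   PetscCall(PCRedundantSetScatter(pc, in, out));
   PetscCall(VecScatterDestroy(&in));  /* the PC keeps its own references */
   PetscCall(VecScatterDestroy(&out));
.ve
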
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,PETSCSF_CLASSID,2);
  PetscValidHeaderSpecific(out,PETSCSF_CLASSID,3);
  PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;
  PetscBool      issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc,&prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
    PetscCall(PetscSubcommCreate(comm,&red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm,red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm,prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));
    PetscCall(PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm)));

    /* create a new KSP and PC of which the processes in each subcomm share a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm,&red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1));
    PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp));
    PetscCall(KSPSetType(red->ksp,KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp,&red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat,MATSEQSBAIJ,&issbaij));
    if (!issbaij) {
      PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat,MATMPISBAIJ,&issbaij));
    }
    if (!issbaij) {
      PetscCall(PCSetType(red->pc,PCLU));
    } else {
      PetscCall(PCSetType(red->pc,PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc,red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp,prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp,"redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on the
     smaller (less parallel) communicator of each process subgroup.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

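   Example Usage:
   A sketch that tightens the inner solver's tolerance; assumes pc already has type PCREDUNDANT:
.vb
   KSP innerksp;
   PetscCall(PCRedundantGetKSP(pc, &innerksp));
   PetscCall(KSPSetTolerances(innerksp, 1.e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
.ve
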
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on
     each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

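   Example Usage:
   A sketch that views the redundant matrix; assumes PCSetUp() has already been called
   so that the matrix exists:
.vb
   Mat Asub;
   PetscCall(PCRedundantGetOperators(pc, &Asub, NULL));
   PetscCall(MatView(Asub, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)Asub))));
.ve
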
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioners can be set using -redundant_pc_xxx; options for the redundant KSP can be set using -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes:
    The default KSP is KSPPREONLY and the default PC is PCLU, or PCCHOLESKY if Pmat is of type MATSBAIJ.

   PCFactorSetShiftType() applied to this PC will convey the shift type to the inner PC if it is factorization based.

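   Example Usage:
   A command-line sketch (the executable name ./app is a placeholder): run 4 redundant
   LU solves, each on 16 of 64 processes:
.vb
   mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu
.ve
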
   Developer Notes:
    Note that KSPSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc,&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(0);
}
533