/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
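
/*
  Overview (a sketch of the scheme implemented below): the preconditioner matrix is replicated onto
  each of nsubcomm subcommunicators with MatCreateRedundantMatrix(); every subcommunicator then
  solves the complete system with its own, less parallel, KSP (by default KSPPREONLY with PCLU),
  and each process's piece of the solution is scattered back to the original communicator.
*/
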
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                    /* actual preconditioner used on each processor */
  Vec                xsub, ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;            /* parallel vectors that concatenate xsub or ysub to facilitate vector scattering */
  Mat                pmats;                 /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin, scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm; /* number of subcommunicators (number of redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
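
/* Data flow of one preconditioner application (a sketch; see PCApply_Redundant()):
     x --scatterin--> xdup --VecPlaceArray--> xsub --KSPSolve--> ysub --VecPlaceArray--> ydup --scatterout--> y
   Each subcommunicator receives a complete copy of the right-hand side and solves the entire
   system; scatterout then pulls each process's portion of the solution from its own
   subcommunicator's copy. */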

static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC subpc;
    PetscCall(KSPGetPC(red->ksp, &subpc));
    PetscCall(PCFactorSetShiftType(subpc, shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view the first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm and a new ksp and pc over the subcommunicator */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset the default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced())
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
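        /* A worked example of the mapping built above (illustrative sizes, not from the source):
           with M = 6, two owning processes (mlocal = 3 each), and n = 2 subcommunicators, rank 0
           (mstart = 0, mend = 3) builds idx1 = {0,1,2, 0,1,2} and idx2 = {0,1,2, 6,7,8}, i.e. each
           locally owned entry of x is sent to all n concatenated copies held in xdup. */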
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* The impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within a subcomm) */
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy the old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: advanced
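
   Example Usage:
   A minimal sketch (assumes `pc` was obtained with `KSPGetPC()`):
.vb
   PetscCall(PCSetType(pc, PCREDUNDANT));
   PetscCall(PCRedundantSetNumber(pc, 4)); /* 4 redundant solves */
.ve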

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(0);
}

326 
327 static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out) {
328   PC_Redundant *red = (PC_Redundant *)pc->data;
329 
330   PetscFunctionBegin;
331   PetscCall(PetscObjectReference((PetscObject)in));
332   PetscCall(VecScatterDestroy(&red->scatterin));
333 
334   red->scatterin = in;
335 
336   PetscCall(PetscObjectReference((PetscObject)out));
337   PetscCall(VecScatterDestroy(&red->scatterout));
338   red->scatterout = out;
339   PetscFunctionReturn(0);
340 }
341 
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, PETSCSF_CLASSID, 2);
  PetscValidHeaderSpecific(out, PETSCSF_CLASSID, 3);
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new KSP and PC of which the processes in each subcommunicator have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the `KSP` on the smaller set of processes

   Level: advanced
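
   Example Usage:
   A minimal sketch (assumes `pc` is a `PCREDUNDANT` preconditioner):
.vb
   KSP innerksp;

   PetscCall(PCRedundantGetKSP(pc, &innerksp));
   PetscCall(KSPSetType(innerksp, KSPGMRES));
.ve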

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(innerksp, 2);
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the redundant solve on this process's subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced
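
   Example Usage:
   A minimal sketch (assumes `pc` is a `PCREDUNDANT` preconditioner that has been set up):
.vb
   Mat mat, pmat;

   PetscCall(PCRedundantGetOperators(pc, &mat, &pmat));
.ve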

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (mat) PetscValidPointer(mat, 2);
  if (pmat) PetscValidPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(0);
}

/*MC
     PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processes

  Options Database Key:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: intermediate

   Notes:
   Options for the redundant preconditioners can be set using the options database prefix -redundant_

   The default `KSP` is `KSPPREONLY` and the default `PC` is `PCLU` (or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`).

   `PCFactorSetShiftType()` applied to this `PC` will convey the shift type into the inner `PC` if it is factorization based.
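
   Example Usage:
   A hypothetical command line (the executable name is illustrative); run on 8 MPI processes, this creates 2 redundant solves, each on 4 processes:
.vb
   mpiexec -n 8 ./myapp -pc_type redundant -pc_redundant_number 2
.ve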

   Developer Note:
   `PCSetInitialGuessNonzero()` is not used by this class but likely should be.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc) {
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(0);
}