xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                    /* actual preconditioner used on each processor */
  Vec                xsub, ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat                pmats;                 /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin, scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm; /* number of subcommunicators (number of redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    PetscCall(KSPGetPC(red->ksp, &pc));
    PetscCall(PCFactorSetShiftType(pc, shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processors of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced())
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
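      /* Illustrative example: with nsubcomm = 2 and a global vector of size M, xdup has global size 2*M
         over dupcomm; copy k (k = 0,1) occupies global indices [k*M, (k+1)*M) and is owned by the
         processes of subcommunicator k, whose xsub borrows that local storage. */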
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* The implementation below works for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
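        /* For example, a process owning [mstart, mend) of x and belonging to the subcommunicator of
           color c copies entries [c*M + mstart, c*M + mend) of xdup back into its local part of x. */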
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processors of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                              set nredundant to 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

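   Example Usage (a minimal sketch; assumes ksp is an existing KSP whose preconditioner will do the redundant solves):
.vb
   PC pc;
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCREDUNDANT));
   PetscCall(PCRedundantSetNumber(pc, 4)); /* four redundant copies of the solve */
.ve
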
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));

  red->scatterin = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

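   Example Usage (a sketch; assumes the global-to-redundant scatter in and the redundant-to-global scatter out
   have already been created with VecScatterCreate() to match the layouts built in PCSetUp_Redundant()):
.vb
   PetscCall(PCSetType(pc, PCREDUNDANT));
   PetscCall(PCRedundantSetScatter(pc, in, out));
.ve
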
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, PETSCSF_CLASSID, 2);
  PetscValidHeaderSpecific(out, PETSCSF_CLASSID, 3);
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp) {
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));
    PetscCall(PetscLogObjectMemory((PetscObject)pc, sizeof(PetscSubcomm)));

    /* create a new PC that the processors in each subcomm have a copy of */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)red->ksp));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) { PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij)); }
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on a smaller (less parallel) communicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

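   Example Usage (a sketch; assumes pc has already been set to type PCREDUNDANT):
.vb
   KSP innerksp;
   PetscCall(PCRedundantGetKSP(pc, &innerksp));
   PetscCall(KSPSetType(innerksp, KSPGMRES)); /* iterate redundantly instead of the default preonly */
   PetscCall(KSPSetTolerances(innerksp, 1.e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
.ve
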
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(innerksp, 2);
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat) {
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used for the redundant solve on each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

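   Example Usage (a sketch; call after PCSetUp() so that the redundant matrices exist):
.vb
   Mat Ared;
   PetscCall(PCRedundantGetOperators(pc, &Ared, NULL));
   /* Ared is a borrowed reference living on the subcommunicator; do not destroy it */
   PetscCall(MatView(Ared, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)Ared))));
.ve
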
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (mat) PetscValidPointer(mat, 2);
  if (pmat) PetscValidPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set using -redundant_pc_xxx; options for the redundant KSP can be set using -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              set n to 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

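   Example Usage (a hypothetical command line; ./app stands in for any PETSc program):
.vb
   mpiexec -n 64 ./app -ksp_type cg -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu
.ve
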
   Level: intermediate

   Notes:
    The default KSP is preonly and the default PC is LU (or CHOLESKY if the Pmat is of type MATSBAIJ).

   PCFactorSetShiftType() applied to this PC will convey the shift type to the inner PC if it is factorization based.

   Developer Notes:
    PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc) {
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc, &red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(0);
}