/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                    /* inner preconditioner used on each subcommunicator */
  Vec                xsub, ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;            /* parallel vectors that concatenate all the xsub or ysub to facilitate vector scattering */
  Mat                pmats;                 /* matrix and optional preconditioner matrix that belong to a subcommunicator */
  VecScatter         scatterin, scatterout; /* scatters used to move all values to each subcommunicator (process group) and back */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm; /* number of subcommunicators, i.e. the number of redundant solves */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    PetscCall(KSPGetPC(red->ksp, &pc));
    PetscCall(PCFactorSetShiftType(pc, shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm and a new ksp and pc over the subcommunicator */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* no parallel direct solver is available; reset the default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
       ydup concatenates all ysub and has an empty local array because ysub's array will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of the psubcomm type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
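        /* Build the scatter-in index sets: each locally owned entry i of x (mstart <= i < mend) is sent to
           every subcommunicator's copy; the k-th copy of x occupies global indices [k*M, (k+1)*M) of xdup,
           so entry i of copy k lives at global index i + k*M */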
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
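        /* The scatter-out pulls this process's result from its own subcommunicator's copy of the solution,
           i.e. from the block of xdup/ydup that starts at global index color*M */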
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
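  /* xdup (on the contiguous parent communicator) and xsub (on the subcommunicator) share the same
     process-local storage, so no values are copied here */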
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 64/4 = 16 processes

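   Example Usage:
   A minimal sketch (assumes `ksp` is a `KSP` that was created on 64 MPI processes and will use this `PC`):
.vb
   PC pc;
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCREDUNDANT));
   PetscCall(PCRedundantSetNumber(pc, 4)); /* 4 redundant solves, each on 64/4 = 16 processes */
.ve
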
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));

  red->scatterin = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, PETSCSF_CLASSID, 2);
  PetscValidHeaderSpecific(out, PETSCSF_CLASSID, 3);
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new KSP and PC of which the processes in each subcommunicator share a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
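    /* default inner solver: a single direct solve per subcommunicator, i.e. KSPPREONLY with PCLU
       (or PCCHOLESKY when the matrix is of a symmetric SBAIJ type) */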
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the `KSP` on the smaller set of processes

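   Example Usage:
   A minimal sketch (assumes `pc` is a `PC` of type `PCREDUNDANT`); the inner solver can also be
   controlled from the options database with the -redundant_ prefix:
.vb
   KSP innerksp;
   PetscCall(PCRedundantGetKSP(pc, &innerksp));
   PetscCall(KSPSetTolerances(innerksp, 1.e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
.ve
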
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(innerksp, 2);
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used for the redundant solves

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

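   Example Usage:
   A minimal sketch (assumes `pc` is a `PCREDUNDANT` preconditioner that has already been set up in
   parallel, so a redundant copy of the operator exists); either output argument may be NULL:
.vb
   Mat      redmat;
   PetscInt m, n;
   PetscCall(PCRedundantGetOperators(pc, &redmat, NULL));
   PetscCall(MatGetSize(redmat, &m, &n)); /* global size of this subcommunicator's redundant copy */
.ve
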
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (mat) PetscValidPointer(mat, 2);
  if (pmat) PetscValidPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(0);
}

/*MC
     PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processes

  Options Database Key:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 64/4 = 16 processes

   Level: intermediate

   Notes:
   Options for the redundant preconditioners can be set using the options database prefix -redundant_

   The default `KSP` is `KSPPREONLY` and the default `PC` is `PCLU`, or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`.

   `PCFactorSetShiftType()` applied to this `PC` will convey the shift type to the inner `PC` if it is factorization based.

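   Example Usage:
   A command-line sketch; the executable name ./app is a placeholder for an application run on 64 MPI processes:
.vb
   mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu
.ve
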
   Developer Note:
   `PCSetInitialGuessNonzero()` is not used by this class but likely should be.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

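  /* by default use one redundant (sequential) solve per MPI process */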
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(0);
}