xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision db9fc945a3c8a0a4b4e9c7cf33c611e450ed8efd)
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                    /* actual preconditioner used on each processor */
  Vec                xsub, ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat                pmats;                 /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter         scatterin, scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm; /* number of subcommunicators, that is, of redundant copies of the solve */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    PetscCall(KSPGetPC(red->ksp, &pc));
    PetscCall(PCFactorSetShiftType(pc, shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced())
       ydup concatenates all ysub and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent from psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
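        /* idx1[] lists this rank's owned global indices of x; idx2[] sends each of those entries to the
           k-th redundant copy inside xdup, which is offset by k*M for subcommunicator color k */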
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscCall(KSPSetUp(red->ksp));

  /* Detect failure */
  KSPConvergedReason redreason;
  PetscCall(KSPGetConvergedReason(red->ksp, &redreason));
  if (redreason) pc->failedreason = PC_SUBPC_ERROR;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

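   Example Usage:
   A minimal sketch (assuming `pc` has already been obtained from a `KSP` running on 64 MPI processes):
.vb
  PetscCall(PCSetType(pc, PCREDUNDANT));
  PetscCall(PCRedundantSetNumber(pc, 4)); /* four redundant solves, each on 64/4 = 16 processes */
.ve
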
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));

  red->scatterin = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

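   Example Usage:
   A minimal sketch, assuming the index sets `isglobal` and `isdup` and the vectors `x` and `xdup` (all hypothetical
   names) have already been built to describe the duplicated layout:
.vb
  VecScatter in, out;
  PetscCall(VecScatterCreate(x, isglobal, xdup, isdup, &in));  /* global vector -> duplicated layout */
  PetscCall(VecScatterCreate(xdup, isdup, x, isglobal, &out)); /* duplicated layout -> global vector */
  PetscCall(PCRedundantSetScatter(pc, in, out));
  PetscCall(VecScatterDestroy(&in)); /* the PC keeps its own references */
  PetscCall(VecScatterDestroy(&out));
.ve
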
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, PETSCSF_CLASSID, 2);
  PetscValidHeaderSpecific(out, PETSCSF_CLASSID, 3);
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new KSP (and its PC) over each subcommunicator; the processes in a subcomm share one copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the `KSP` on the smaller set of processes

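   Example Usage:
   A minimal sketch (assuming `pc` has already been set to `PCREDUNDANT`):
.vb
  KSP innerksp;
  PC  innerpc;
  PetscCall(PCRedundantGetKSP(pc, &innerksp));
  PetscCall(KSPGetPC(innerksp, &innerpc));
  PetscCall(PCFactorSetShiftType(innerpc, MAT_SHIFT_NONZERO)); /* for example, adjust the inner factorization */
.ve
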
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(innerksp, 2);
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

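   Example Usage:
   A minimal sketch (assuming `pc` is a `PCREDUNDANT` that has already been set up):
.vb
  Mat A, P;
  PetscCall(PCRedundantGetOperators(pc, &A, &P));
  PetscCall(MatView(P, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)P)))); /* inspect the redundant matrix */
.ve
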
   Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (mat) PetscValidPointer(mat, 2);
  if (pmat) PetscValidPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processors

  Options Database Key:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: intermediate

   Notes:
   Options for the redundant preconditioners can be set using the options database prefix -redundant_

   The default `KSP` is `KSPPREONLY` and the default `PC` is `PCLU`, or `PCCHOLESKY` if the Pmat is of type `MATSBAIJ`.

   `PCFactorSetShiftType()` applied to this `PC` will convey the shift type into the inner `PC` if it is factorization based.
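
   Example Usage:
   A minimal sketch of using this preconditioner from code (the matrix `A` and the vectors `b` and `x` are assumed
   to already exist and be distributed over `PETSC_COMM_WORLD`):
.vb
  KSP ksp;
  PC  pc;
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCREDUNDANT));
  PetscCall(PCRedundantSetNumber(pc, 2)); /* two subgroups, each redundantly solving the entire problem */
  PetscCall(KSPSetFromOptions(ksp));      /* picks up -redundant_ksp_type, -redundant_pc_type, ... */
  PetscCall(KSPSolve(ksp, b, x));
.ve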

   Developer Note:
   `PCSetInitialGuessNonzero()` is not used by this class but likely should be.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}