xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision d313477fcd2e54b54bd66b2cbb726472c9677787)

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators (redundant solves) */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP (and inner PC) of which the processors in each subcomm hold a redundant copy */
      subcomm = red->psubcomm->comm;

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else subcomm = red->psubcomm->comm;

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
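    /* with PETSC_SUBCOMM_INTERLACED, each rank of the subcommunicator takes over the rows
       owned by n consecutive ranks of the original communicator (n = number of subcommunicators),
       hence the stride of n through the ownership ranges below */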
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) rend_sub = range[red->psubcomm->n*(subrank+1)];
    else rend_sub = m;

    mloc_sub = rend_sub - rstart_sub;
    ierr     = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,1,mloc_sub,PETSC_DECIDE,NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the duplicated parent communicator dupparent, not PetscObjectComm((PetscObject)pc)! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j    = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&vec);CHKERRQ(ierr);

  /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
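  /* data flow: x (original comm) --scatterin--> xdup (duplicated parent comm), whose local
     array is aliased by xsub (subcomm); KSPSolve maps xsub to ysub; ysub's array is aliased
     by ydup, which is scattered back out to y */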
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  if (red->ksp) {ierr = KSPReset(red->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
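
/*
   A minimal usage sketch (assumes a KSP "ksp" whose operators are already set,
   running on 64 MPI processes; the numbers are illustrative):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);    4 redundant solves, each on 16 processes
*/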

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);

  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
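
/*
   A minimal sketch of building compatible scatters by hand, mirroring what
   PCSetUp_Redundant() does by default (assumes a global vector "vec" of length
   m on communicator "comm", the duplicated vector "xdup" of length nsub*m,
   nsub subcommunicators, this process's subcommunicator color "color", local
   ownership range [mstart,mend), and mlocal = mend-mstart; "in" stacks nsub
   copies of vec into xdup, "out" pulls this group's copy back):

     IS         is1,is2;
     VecScatter in,out;
     PetscInt   i,j,k,*idx1,*idx2;

     ierr = PetscMalloc2(nsub*mlocal,PetscInt,&idx1,nsub*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
     j = 0;
     for (k=0; k<nsub; k++) {
       for (i=mstart; i<mend; i++) {idx1[j] = i; idx2[j++] = i + m*k;}
     }
     ierr = ISCreateGeneral(comm,nsub*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
     ierr = ISCreateGeneral(comm,nsub*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
     ierr = VecScatterCreate(vec,is1,xdup,is2,&in);CHKERRQ(ierr);
     ierr = ISDestroy(&is1);CHKERRQ(ierr);
     ierr = ISDestroy(&is2);CHKERRQ(ierr);
     ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);

     ierr = ISCreateStride(comm,mlocal,mstart+color*m,1,&is1);CHKERRQ(ierr);
     ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
     ierr = VecScatterCreate(xdup,is1,vec,is2,&out);CHKERRQ(ierr);
     ierr = ISDestroy(&is1);CHKERRQ(ierr);
     ierr = ISDestroy(&is2);CHKERRQ(ierr);

     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
     ierr = VecScatterDestroy(&in);CHKERRQ(ierr);     the PC holds its own references
     ierr = VecScatterDestroy(&out);CHKERRQ(ierr);
*/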

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP (and inner PC) of which the processors in each subcomm hold a redundant copy */
    subcomm = red->psubcomm->comm;

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on a smaller, less parallel communicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
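
/*
   A minimal usage sketch (assumes "pc" is of type PCREDUNDANT; replaces the
   default inner LU factorization with Cholesky):

     KSP innerksp;
     PC  innerpc;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);
*/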

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the redundant solve on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
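
/*
   A minimal usage sketch (assumes PCSetUp() has been called so the redundant
   matrix exists; NULL may be passed for an operator that is not needed):

     Mat rmat;
     ierr = PCRedundantGetOperators(pc,&rmat,NULL);CHKERRQ(ierr);
     ierr = MatView(rmat,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)rmat)));CHKERRQ(ierr);
*/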

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
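
/*
   Example command line (a sketch; "ex2" stands for any PETSc application that
   calls KSPSetFromOptions()):

     mpiexec -n 64 ./ex2 -ksp_type gmres -pc_type redundant -pc_redundant_number 4 \
             -redundant_ksp_type preonly -redundant_pc_type lu
*/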

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                                           PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                                           PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                                           PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                                           PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END