
/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator that hold the parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators (redundant solves) */
} PC_Redundant;
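
/* A hedged overview of the data flow implied by the fields above (a summary of
   PCApply_Redundant below, not additional behavior): the global right-hand side
   x on ((PetscObject)pc)->comm is scattered into xdup on the duplicated parent
   communicator, xdup's local storage is aliased as xsub on the subcommunicator,
   ksp solves redundantly into ysub, ysub's storage is aliased as ydup, and the
   result is scattered back into the global y. */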

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm  = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processors in each subcomm have a copy */
      subcomm = red->psubcomm->comm;

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else subcomm = red->psubcomm->comm;

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) rend_sub = range[red->psubcomm->n*(subrank+1)];
    else rend_sub = m;

    mloc_sub = rend_sub - rstart_sub;
    ierr     = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,1,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator red->psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j    = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&vec);CHKERRQ(ierr);

  /* if the pmatrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
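
/* A worked example of the PETSC_SUBCOMM_INTERLACED layout used above (a hedged
   sketch, not taken from this file): with 4 MPI ranks and nsubcomm = 2, the
   interlacing places ranks 0 and 2 in the color-0 subcommunicator and ranks 1
   and 3 in color 1.  xdup lives on the duplicated parent communicator and holds
   nsubcomm copies of the global vector of size m, so the scatter built above
   maps global index i to index i + m*k for each copy k = 0,...,nsubcomm-1. */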

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  if (red->ksp) {ierr = KSPReset(red->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant pc %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
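
/* A hedged usage sketch (the outer KSP and matrix setup are assumptions, not
   part of this file): run 4 redundant solves regardless of the process count.

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);

   The same effect is available from the options database with -pc_redundant_number 4. */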

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
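
/* A minimal sketch of supplying custom scatters (a hedged example; in and out
   are assumed to have been built with VecScatterCreate() so that `in` moves the
   global vector into the duplicated layout and `out` moves one redundant
   solution back):

     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);

   Both scatters are referenced by the PC (see PetscObjectReference() above), so
   the caller may destroy its own handles afterwards. */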

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processors in each subcomm have a copy */
    subcomm = red->psubcomm->comm;

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
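
/* A hedged example of customizing the inner solve (variable names are
   illustrative assumptions):

     KSP innerksp;
     PC  innerpc;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);  /* replace the default LU */

   Equivalently, use the options database: -redundant_pc_type cholesky. */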

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
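
/* A short sketch of inspecting the redundant operator (a hedged example; in
   this implementation both outputs point at the same subcommunicator matrix):

     Mat mat,pmat;
     ierr = PCRedundantGetOperators(pc,&mat,&pmat);CHKERRQ(ierr);
     ierr = MatView(mat,PETSC_VIEWER_STDOUT_(((PetscObject)mat)->comm));CHKERRQ(ierr);
*/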

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
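
/* A hedged command-line sketch for PCREDUNDANT (the program name and outer
   solver choice are assumptions): run a 64-process job with 4 redundant LU
   solves, each on 16 processes:

     mpiexec -n 64 ./app -ksp_type gmres -pc_type redundant \
                         -pc_redundant_number 4 -redundant_pc_type lu
*/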

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                                           PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                                           PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                                           PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                                           PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END