/* xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 5c8f6a953e7ed1c81f507d64aebddb11080b60e9) */

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator that hold the entries of parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors on the duplicated communicator that aggregate xsub or ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix (and optional preconditioner matrix) belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
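
/* Data flow of one application of the preconditioner (a summary of PCApply_Redundant() below):

     x  --scatterin-->  xdup  --VecPlaceArray-->  xsub
     KSPSolve(ksp,xsub,ysub) redundantly on each subcommunicator
     ysub  --VecPlaceArray-->  ydup  --scatterout-->  y

   Every subgroup receives the entire right-hand side and computes the entire
   solution; the scatter back takes each process's entries from its own
   subgroup's copy of the duplicated vector. */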

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet set up\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes in each subcommunicator hold a redundant copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) {
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,1,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator red->psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&vec);CHKERRQ(ierr);

  /* if the pmatrix provided by the user lives on one process, we do not need to gather a parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  if (red->ksp) {ierr = KSPReset(red->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, with 64 MPI processes and
                an nredundant of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
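
/* Example usage (a sketch, not part of this file; assumes ksp is an existing KSP
   created on 64 MPI processes):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);    four redundant solves, each on 16 = 64/4 processes

   The same effect is available from the options database via -pc_redundant_number 4. */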

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
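
/* Example usage (a sketch; scat_in and scat_out are hypothetical VecScatters
   created elsewhere, e.g. with VecScatterCreate(), replacing the default scatters
   built in PCSetUp_Redundant()):

     ierr = PCRedundantSetScatter(pc,scat_in,scat_out);CHKERRQ(ierr);

   Both scatters are referenced by the PC, so the caller may destroy its own handles. */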

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator hold a redundant copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs (less
     parallel) on each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
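
/* Example usage (a sketch; customizes the inner solver in code rather than with
   the -redundant_ options prefix):

     KSP innerksp;
     PC  innerpc;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);

   By default the inner KSP is KSPPREONLY and the inner PC is PCLU (see above). */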

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
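
/* Example usage (a sketch; inspects the redundant matrix gathered onto this
   process's subcommunicator after PCSetUp() has been called):

     Mat      rmat;
     MPI_Comm rcomm;
     ierr = PCRedundantGetOperators(pc,&rmat,PETSC_NULL);CHKERRQ(ierr);
     ierr = PetscObjectGetComm((PetscObject)rmat,&rcomm);CHKERRQ(ierr);
     ierr = MatView(rmat,PETSC_VIEWER_STDOUT_(rcomm));CHKERRQ(ierr);

   Note that in this implementation mat and pmat both return the same matrix, red->pmats. */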

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, with 64 MPI processes and
                              an n of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
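
/* Example (a sketch of typical command-line usage; ./ex2 is a placeholder for any
   PETSc program that solves a linear system):

     mpiexec -n 64 ./ex2 -ksp_type gmres -pc_type redundant \
                         -pc_redundant_number 4 -redundant_pc_type lu

   This runs GMRES on all 64 processes, while each application of the preconditioner
   redundantly performs an LU solve of the entire problem within each of the 4
   subgroups of 16 processes. */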

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->reset           = PCReset_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                    PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END