xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision e91c6855ec8eedcfe894fb1dc8d9ee5acd2335f4)
1 #define PETSCKSP_DLL
2 
3 /*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
5 */
6 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
7 #include "petscksp.h"           /*I "petscksp.h" I*/
8 
typedef struct {
  KSP          ksp;                  /* inner solver each process group runs redundantly (default KSPPREONLY + PCLU) */
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;       /* gather pc->pmat onto each subcommunicator; forced FALSE on a single process */
  PetscSubcomm psubcomm;             /* splitting of the parent communicator into nsubcomm interlaced groups */
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
20 
21 #undef __FUNCT__
22 #define __FUNCT__ "PCView_Redundant"
23 static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
24 {
25   PC_Redundant   *red = (PC_Redundant*)pc->data;
26   PetscErrorCode ierr;
27   PetscBool      iascii,isstring;
28   PetscViewer    subviewer;
29 
30   PetscFunctionBegin;
31   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
32   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
33   if (iascii) {
34     if (!red->psubcomm) {
35       ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
36     } else {
37       ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
38       ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
39       if (!red->psubcomm->color) { /* only view first redundant pc */
40 	ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
41 	ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
42 	ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
43       }
44       ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
45     }
46   } else if (isstring) {
47     ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
48   } else {
49     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
50   }
51   PetscFunctionReturn(0);
52 }
53 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
/*
   PCSetUp_Redundant - Builds the subcommunicators, the work vectors and scatters,
   gathers the (parallel) matrix onto each subcommunicator, and sets up the inner
   KSP that every process group then solves redundantly.
*/
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  /* template vector compatible with pc->pmat: provides the global size and layout */
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new PC that processors in each subcomm have copy of */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      /* inner solver inherits this PC's prefix plus "redundant_" */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
       subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    /* with an interlaced split, subrank covers the rows of psubcomm->n consecutive parent ranks */
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      /* scatterin replicates the global vector into every duplicate:
         entry i of the original goes to entry i + m*k of xdup for each duplicate k */
      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      /* scatterout pulls this rank's rows back out of its own color's duplicate */
      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices; a fresh gather is needed below */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      /* same nonzero pattern: refill the previously gathered matrix in place */
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell PC of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    /* single-process case: the inner KSP uses the original operators directly */
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
182 
183 #undef __FUNCT__
184 #define __FUNCT__ "PCApply_Redundant"
185 static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
186 {
187   PC_Redundant   *red = (PC_Redundant*)pc->data;
188   PetscErrorCode ierr;
189   PetscScalar    *array;
190 
191   PetscFunctionBegin;
192   /* scatter x to xdup */
193   ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
194   ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
195 
196   /* place xdup's local array into xsub */
197   ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
198   ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);
199 
200   /* apply preconditioner on each processor */
201   ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
202   ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
203   ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);
204 
205   /* place ysub's local array into ydup */
206   ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
207   ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);
208 
209   /* scatter ydup to y */
210   ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
211   ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
212   ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
213   ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
214   PetscFunctionReturn(0);
215 }
216 
217 #undef __FUNCT__
218 #define __FUNCT__ "PCDestroy_Redundant"
219 static PetscErrorCode PCDestroy_Redundant(PC pc)
220 {
221   PC_Redundant   *red = (PC_Redundant*)pc->data;
222   PetscErrorCode ierr;
223 
224   PetscFunctionBegin;
225   if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
226   if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
227   if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
228   if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
229   if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
230   if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
231   if (red->pmats) {
232     ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
233   }
234   if (red->ksp) {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
235   if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
236   ierr = PetscFree(pc->data);CHKERRQ(ierr);
237   PetscFunctionReturn(0);
238 }
239 
240 #undef __FUNCT__
241 #define __FUNCT__ "PCSetFromOptions_Redundant"
242 static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
243 {
244   PetscErrorCode ierr;
245   PC_Redundant   *red = (PC_Redundant*)pc->data;
246 
247   PetscFunctionBegin;
248   ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
249   ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
250   ierr = PetscOptionsTail();CHKERRQ(ierr);
251   PetscFunctionReturn(0);
252 }
253 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
/* Record the requested number of redundant solves; the subcommunicators are
   built later, in PCSetUp_Redundant() or PCRedundantGetKSP_Redundant().
   NOTE(review): once red->psubcomm exists, changing this has no effect — confirm. */
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END
266 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
                              use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* reject nonpositive counts before dispatching to the type-specific method */
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  /* PetscTryMethod: silently a no-op if pc is not of type PCREDUNDANT */
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
293 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
/* Install user-provided in/out scatters, replacing any existing ones. */
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* take the new reference BEFORE destroying the old one: if in == red->scatterin
     the destroy must not drop the last reference */
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END
312 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  /* no-op unless pc is of type PCREDUNDANT; setting these suppresses the
     default scatter construction in PCSetUp_Redundant() */
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
342 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
/* Return the inner KSP, creating it (and the subcommunicators) on demand so the
   caller can configure it before PCSetUp(); this creation sequence mirrors the
   one in PCSetUp_Redundant() and must stay in sync with it. */
PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    /* inner solver inherits this PC's prefix plus "redundant_" */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END
378 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  /* NOTE(review): if the inner KSP does not yet exist, the type-specific method
     creates subcommunicators (PetscSubcommCreate), so the first call is
     effectively collective despite the manpage — confirm */
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
406 
407 EXTERN_C_BEGIN
408 #undef __FUNCT__
409 #define __FUNCT__ "PCRedundantGetOperators_Redundant"
410 PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
411 {
412   PC_Redundant *red = (PC_Redundant*)pc->data;
413 
414   PetscFunctionBegin;
415   if (mat)  *mat  = red->pmats;
416   if (pmat) *pmat = red->pmats;
417   PetscFunctionReturn(0);
418 }
419 EXTERN_C_END
420 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by
     the redundant solver; these live on the subcommunicator (they are the
     gathered matrix, not the original parallel operators)

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
450 
451 /* -------------------------------------------------------------------------------------*/
452 /*MC
453      PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors
454 
     Options for the redundant preconditioners can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx
456 
457   Options Database:
458 .  -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
459                               use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
460 
461    Level: intermediate
462 
463    Notes: The default KSP is preonly and the default PC is LU.
464 
465    Developer Notes: Note that PCSetInitialGuessNonzero()  is not used by this class but likely should be.
466 
467 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
468            PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
469 M*/
470 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
/*
   PCCreate_Redundant - Allocates the PC_Redundant context and installs the
   function table; by default there is one redundant solve per process
   (nsubcomm == communicator size) and the parallel matrix is gathered.
*/
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;   /* default: every process does its own solve */
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;   /* transpose application not implemented */
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  /* compose type-specific methods so the generic PCRedundantXXX() wrappers dispatch here */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                    PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
504