#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that concatenate xsub or ysub across subcommunicators to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
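
/*
   Rough sketch of the data flow in one application of this preconditioner
   (see PCApply_Redundant() below):

      x (global comm) --scatterin--> xdup (duplicated parent comm, one copy of x per subcommunicator)
      xdup's local array is placed into xsub (subcommunicator)
      PCApply: ysub = pc(xsub), solved redundantly on each subcommunicator
      ysub's local array is placed into ydup
      ydup --scatterout--> y (global comm)
*/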

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes in each subcommunicator share a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) {
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the duplicated parent communicator psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters: xdup on the duplicated parent communicator holds one copy of the
       global vector per subcommunicator, so each owned global index i is sent to index i + m*k
       for every subcommunicator k = 0,...,n-1 */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the communicator has a single process the matrix is already sequential, so there is nothing to gather */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it onto the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats)      {ierr = MatDestroy(red->pmats);CHKERRQ(ierr);}
  if (red->ksp)        {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  if (red->psubcomm)   {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, with 64 MPI processes and
                nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant PCs %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
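
/*
   A minimal usage sketch (hedged: a KSP object ksp with its operators already set
   is assumed; error checking follows the usual PETSc pattern):

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
      ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);

   On 64 MPI processes this requests 4 redundant solves, each on 16 = 64/4 processes.
*/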

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
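
/*
   A hedged sketch of supplying custom scatters. The index sets is1/is2 and the
   vectors vglobal/vdup are hypothetical and must match the layouts this PC uses
   (see the scatter creation in PCSetUp_Redundant() above). Since the PC takes
   its own references, the caller may destroy its scatters afterwards:

      VecScatter in,out;
      ierr = VecScatterCreate(vglobal,is1,vdup,is2,&in);CHKERRQ(ierr);
      ierr = VecScatterCreate(vdup,is2,vglobal,is1,&out);CHKERRQ(ierr);
      ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
      ierr = VecScatterDestroy(in);CHKERRQ(ierr);
      ierr = VecScatterDestroy(out);CHKERRQ(ierr);
*/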

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode  PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator share a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; it is applied
     redundantly on each subcommunicator (and is sequential when the number of
     redundant solves equals the number of processes).

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the inner PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetPC_C",(PC,PC*),(pc,innerpc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
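
/*
   A hedged sketch: fetch the inner PC to customize the redundant solve, for
   example to keep the default direct solve but set its type explicitly:

      PC innerpc;
      ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
      ierr = PCSetType(innerpc,PCLU);CHKERRQ(ierr);

   The same effect is available from the options database via -redundant_pc_type lu.
*/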

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the
     redundant solve on each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
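
/*
   A hedged sketch: after PCSetUp() has run, the gathered operator can be
   inspected; passing PETSC_NULL for an output that is not needed is allowed:

      Mat mat;
      ierr = PCRedundantGetOperators(pc,&mat,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatView(mat,PETSC_VIEWER_STDOUT_(((PetscObject)mat)->comm));CHKERRQ(ierr);
*/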

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, with 64 MPI processes and
                              n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Developer Notes: Should this be a KSP? Or maybe it should be an ability of the base KSP class to
     do redundant solves with the actual KSP and PC living on subcommunicators? Note that PCSetInitialGuessNonzero()
     is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
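
/*
   A hedged end-to-end sketch of selecting this preconditioner from code (a KSP
   object ksp with operators set, and vectors b and x, are assumed to exist):

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
      ierr = PCRedundantSetNumber(pc,2);CHKERRQ(ierr);
      ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

   or, equivalently, from the command line:

      -pc_type redundant -pc_redundant_number 2 -redundant_pc_type lu
*/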

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END