xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision cc85fe4ded5189db5e5e073ce90ef04de0003fdb)

/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vectors that concatenate xsub or ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;

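/* A reading aid derived from the code below (PCSetUp_Redundant() builds these objects,
   PCApply_Redundant() uses them); the data flow of one application is:
     global x --scatterin--> xdup (on dupcomm, all redundant copies concatenated)
     xdup's local array is placed into xsub with VecPlaceArray() (no copy)
     KSPSolve(ksp,xsub,ysub) runs redundantly on every subcommunicator
     ysub's local array is placed into ydup; ydup --scatterout--> global y */
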
#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP of which the processes in each subcomm have a copy */
      subcomm = red->psubcomm->comm;

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it onto the processes of a subcommunicator */
      ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatGetVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatGetVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* The impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it onto the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,pc->flag);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                              nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

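/* Usage sketch (hypothetical caller code; assumes ksp was created on a 64-rank
   communicator, so each of the 4 redundant solves runs on 16 processes):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);

   The same effect is available on the command line via -pc_redundant_number 4. */
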
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

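/* Usage sketch (hypothetical; error checking elided). The pair must have the same
   shapes as the scatters built in PCSetUp_Redundant() above: in maps the global
   vector onto the concatenated duplicate vector, out maps one redundant copy back
   to the global vector. After building them with VecScatterCreate():

     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);

   Since PCSetUp_Redundant() only creates scatters when red->scatterin is NULL,
   the user-supplied pair replaces the default interlaced layout. */
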
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP of which the processes in each subcomm have a copy */
    subcomm = red->psubcomm->comm;

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

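/* Usage sketch (hypothetical solver settings, for illustration only): replace the
   default preonly+LU inner solver programmatically:

     KSP inner;
     ierr = PCRedundantGetKSP(pc,&inner);CHKERRQ(ierr);
     ierr = KSPSetType(inner,KSPGMRES);CHKERRQ(ierr);
     ierr = KSPSetTolerances(inner,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);

   The command-line equivalent uses the prefix set above, e.g.
   -redundant_ksp_type gmres -redundant_ksp_rtol 1.e-10 */
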
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used by the redundant solver; they live on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

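/* Usage sketch: inspect the operator gathered onto the subcommunicator after PCSetUp();
   passing NULL for an output you do not need is allowed, as the implementation above shows:

     Mat pmat;
     ierr = PCRedundantGetOperators(pc,NULL,&pmat);CHKERRQ(ierr);
     ierr = MatView(pmat,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pmat)));CHKERRQ(ierr);
*/
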
/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioners can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

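/* Example command line (a sketch; the executable name and process count are illustrative):

     mpiexec -n 64 ./app -ksp_type gmres -pc_type redundant -pc_redundant_number 4 \
             -redundant_ksp_type preonly -redundant_pc_type lu

   This runs 4 redundant direct solves, each on 16 of the 64 processes. */
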
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}