
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each subcommunicator */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each process group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators (redundant solves) */
} PC_Redundant;
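
/*
   Data flow in PCApply_Redundant() (a summary of the code below):

     x  --scatterin-->  xdup  --VecPlaceArray-->  xsub  --KSPSolve-->  ysub
     ysub  --VecPlaceArray-->  ydup  --scatterout-->  y

   xdup and ydup live on psubcomm->dupparent, a duplicate of the original
   communicator whose ranks are permuted so that each subcommunicator owns a
   contiguous block of entries; this lets xsub/ysub share xdup/ydup's local
   arrays without copying.
*/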

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet set up\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view the first redundant PC */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MatGetVecs(pc->pmat,&vec,NULL);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes of each subcommunicator hold a copy */
      subcomm = red->psubcomm->comm;

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else subcomm = red->psubcomm->comm;

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

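    /*
       With PETSC_SUBCOMM_INTERLACED, parent rank r gets color r % nsubcomm and
       subrank r / nsubcomm, so the process with subrank s of each
       subcommunicator aggregates the rows owned by the nsubcomm consecutive
       parent ranks nsubcomm*s, ..., nsubcomm*(s+1)-1; that is what the range[]
       indexing below computes.
    */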
    /* get the local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) rend_sub = range[red->psubcomm->n*(subrank+1)];
    else rend_sub = m;

    mloc_sub = rend_sub - rstart_sub;
    ierr     = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,1,mloc_sub,PETSC_DECIDE,NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator red->psubcomm->dupparent, not PetscObjectComm((PetscObject)pc)! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters: xdup holds psubcomm->n copies of the global vector, with copy k occupying entries [k*m, (k+1)*m) */
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j    = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);

      /* the scatter out pulls each process's piece of the solution from its own subcommunicator's copy (color) */
      ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&vec);CHKERRQ(ierr);

  /* if the matrix set by the user lives on a single process, there is no parallel matrix to gather */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply the preconditioner redundantly on each subcommunicator */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  if (red->ksp) {ierr = KSPReset(red->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant solves","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, with 64 MPI processes and
                nredundant of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

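   Example Usage:
   A minimal sketch; it assumes ksp is an existing KSP on 64 processes and omits error checking:
.vb
   PC pc;
   KSPGetPC(ksp,&pc);
   PCSetType(pc,PCREDUNDANT);
   PCRedundantSetNumber(pc,4);
.ve
   This gives 4 subgroups of 16 processes, each solving the full system.
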
   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

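   Notes:
   By default PCSetUp_Redundant() builds these scatters itself: in must move a
   global vector into the work vector that holds one copy of the global system
   per subcommunicator, and out must move the selected copy of the solution
   back. Supply your own pair only if you need a different distribution.
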
   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    /* duplicates the setup in PCSetUp_Redundant() so the inner KSP exists before PCSetUp() is called */
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes of each subcommunicator hold a copy */
    subcomm = red->psubcomm->comm;

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less-parallel KSP created by the redundant PC, which runs on each subgroup of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

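   Example Usage:
   A minimal sketch; it assumes pc already has type PCREDUNDANT and omits error checking:
.vb
   KSP innerksp;
   PC  innerpc;
   PCRedundantGetKSP(pc,&innerksp);
   KSPGetPC(innerksp,&innerpc);
   PCSetType(innerpc,PCCHOLESKY);
.ve
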
   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the redundant solve on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

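   Notes:
   Either output argument may be NULL if that matrix is not needed. The
   matrices are gathered by PCSetUp_Redundant(), so they are only available
   after the PC has been set up.
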
   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on each subgroup of processes

     Options for the redundant preconditioner can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, with 64 MPI processes and
                              n of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

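   Example Usage:
   A minimal sketch from the options database; it assumes the program runs on 64 processes:
.vb
   -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu
.ve
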
.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;  /* default: one subcommunicator per process, i.e. fully redundant sequential solves */
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = NULL;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}