xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 7b6bb2c608b6fc6714ef38fda02c2dbb91c82665)

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. redundant solves */
} PC_Redundant;
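
/*
   Summary of the data flow in PCApply_Redundant() below (an editor's sketch derived
   from the code in this file, not part of the original source):

     x (on ((PetscObject)pc)->comm) --scatterin--> xdup (on psubcomm->dupparent,
                                                   which holds nsubcomm copies of x)
     xdup's local array is placed into xsub (on psubcomm->comm)
     KSPSolve(ksp,xsub,ysub) runs redundantly on each subcommunicator
     ysub's local array is placed into ydup (on psubcomm->dupparent)
     ydup --scatterout--> y (on ((PetscObject)pc)->comm)
*/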

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet set up\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processors in each subcommunicator hold a redundant copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
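
    /* Illustration (editor's note, not in the original source): with 4 processes and
       nsubcomm = 2, PETSC_SUBCOMM_INTERLACED places global ranks {0,2} in the color-0
       subcommunicator and {1,3} in the color-1 subcommunicator. Subrank s of either
       subcommunicator then owns the union of the global ownership ranges of ranks
       2*s and 2*s+1, i.e. rows range[2*s] .. range[2*(s+1)], as computed above. */
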
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&vec);CHKERRQ(ierr);

  /* if the preconditioner matrix set by the user is sequential, we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant pc %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
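
/*
   Usage sketch (editor's illustration, not part of the original source): selecting
   4 redundant subsolves from application code. "A", "b", and "x" stand for a
   previously assembled Mat and Vecs owned by the calling program.

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

   The same effect is available from the options database with
   -pc_type redundant -pc_redundant_number 4.
*/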

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
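
/*
   Note (editor's addition, not part of the original source): the default scatters
   built in PCSetUp_Redundant() replicate the entire right-hand side onto every
   subcommunicator. A custom pair created with VecScatterCreate() can be supplied
   instead, e.g.

     VecScatter in,out;
     ierr = VecScatterCreate(x,is_from,xdup,is_to,&in);CHKERRQ(ierr);
     ierr = VecScatterCreate(ydup,is_back,y,is_local,&out);CHKERRQ(ierr);
     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);

   where x, y, xdup, ydup and the index sets is_* are hypothetical application-provided
   objects matching the vector layouts described at the top of this file.
*/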

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processors in each subcommunicator hold a redundant copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on the
     smaller (less parallel) subcommunicators.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
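
/*
   Usage sketch (editor's illustration, not part of the original source): customizing
   the inner solver programmatically rather than via the redundant_ options prefix.
   "pc" is assumed to already have type PCREDUNDANT.

     KSP innerksp;
     PC  innerpc;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);
*/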

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the
     redundant solve on each subcommunicator (sequential when each subcommunicator
     has a single process, which is the default)

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
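
/*
   Usage sketch (editor's illustration, not part of the original source): inspecting
   the gathered matrix after the PC has been set up, assuming the default case where
   each subcommunicator holds a single process and the matrix is therefore sequential.

     Mat redmat;
     ierr = PCRedundantGetOperators(pc,&redmat,PETSC_NULL);CHKERRQ(ierr);
     ierr = MatView(redmat,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
*/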

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
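
/*
   Command-line sketch (editor's illustration, not part of the original source): running
   a hypothetical application ./app on 8 processes with two redundant parallel LU solves,
   each on 4 processes:

     mpiexec -n 8 ./app -ksp_type gmres -pc_type redundant \
         -pc_redundant_number 2 -redundant_ksp_type preonly -redundant_pc_type lu
*/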

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                    PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
499