/* /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 3ff96bc97e95df46fbdbdf211ddbac0ba3669e66) */
#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
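
/* Typical usage (a minimal sketch; all option names below appear in this file):
     -pc_type redundant          select this preconditioner
     -pc_redundant_number 2      use 2 redundant solves, each on half the processes
     -redundant_pc_type lu       options for the inner solver take the redundant_ prefix
*/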
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each subcommunicator */
  Vec          xsub,ysub;            /* vectors of a subcommunicator that hold pieces of the parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that aggregate xsub or ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. the number of redundant solves */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view the first (color=0) redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#include "private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP of which the processes in each subcomm share a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
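    /* a sketch of the layout (based on the indexing below): subcomm process
       subrank owns the rows of red->psubcomm->n consecutive processes of the
       original layout, so the redundant copy keeps the global row ordering */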
    rstart_sub = pc->pmat->rmap->range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap->range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator red->psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

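      /* xdup stacks red->psubcomm->n full copies of the global vector, with
         copy k living at offset m*k; idx1/idx2 below send each local row i
         of the original vector to position i + m*k in every copy k */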
      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the preconditioner matrix provided by the user is sequential, then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator about its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
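  /* overall flow: gather the global vector redundantly into each subgroup,
     apply the subgroup's preconditioner, then have every process pull its
     portion of the result back from its own subgroup's copy */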
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor group */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats)      {ierr = MatDestroy(red->pmats);CHKERRQ(ierr);}
  if (red->psubcomm)   {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->ksp)        {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant solves","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG,"Number of redundant solves %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
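
/* Example (a minimal sketch): requesting 4 redundant solves from code, which is
   equivalent to the command-line option -pc_redundant_number 4:
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
*/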

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
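
/* Example (a hypothetical sketch): installing user-built scatters; the vectors
   xglobal/xdup and index sets isg, isd, iso, isl are assumed to be created by
   the caller, mirroring the default construction in PCSetUp_Redundant:
     ierr = VecScatterCreate(xglobal,isg,xdup,isd,&in);CHKERRQ(ierr);
     ierr = VecScatterCreate(xdup,iso,xglobal,isl,&out);CHKERRQ(ierr);
     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
*/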

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP of which the processes in each subcomm share a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; this PC runs on
     each subcommunicator (it is sequential when each subcommunicator has a
     single process).

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the inner PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
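
/* Example (a minimal sketch): switching the inner preconditioner, which
   defaults to PCLU, before the outer PC is set up:
     PC innerpc;
     ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);
*/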

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used by the redundant solve on each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
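
/* Example (a minimal sketch): viewing the gathered operator after PCSetUp has
   run; a NULL pmat argument is permitted since the result is only written when
   the pointer is non-NULL:
     Mat A;
     ierr = PCRedundantGetOperators(pc,&A,PETSC_NULL);CHKERRQ(ierr);
     ierr = MatView(A,PETSC_VIEWER_STDOUT_(((PetscObject)A)->comm));CHKERRQ(ierr);
*/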

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx, for example -redundant_pc_type lu

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END