/* xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision dd2cd8d73f22f6c4ff65b606e93d00fe4b08e8e4) */
#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
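
/* Data flow in PCApply_Redundant: the global vector x is scattered into xdup
   (which holds psubcomm->n concatenated copies of x on the duplicated parent
   communicator); xdup's local array is placed into xsub so that each
   subcommunicator sees its own full copy; the inner PC is applied
   (xsub -> ysub); ysub's array is placed into ydup and scattered back out
   to the global vector y. */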

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view the first (color=0) redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes in each subcomm have a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
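    /* a sketch of the layout implied by the indexing below: subrank s of each
       subcommunicator covers the rows owned by parent ranks psubcomm->n*s
       through psubcomm->n*(s+1)-1 of ((PetscObject)pc)->comm */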
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) {
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
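    /* each of the psubcomm->n subcommunicators receives a full copy of the global
       vector: entry i of vec maps to entry i + m*k of xdup for subcommunicator
       color k, so xdup holds n concatenated copies of vec */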
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the matrix is sequential (a single process), we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* give the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats)      {ierr = MatDestroy(red->pmats);CHKERRQ(ierr);}
  if (red->psubcomm)   {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->ksp)        {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.
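
   Sample usage (a minimal sketch; assumes pc was obtained with KSPGetPC() from an
   existing KSP and the preconditioner has not yet been set up):
.vb
   ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
   ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
.ve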

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG,"Number of redundant solves %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out
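
   Sample usage (a sketch; the vectors xglobal/xred and the index sets is1/is2 are
   hypothetical and must be created by the caller to match the redundant layout;
   the scatters are referenced internally, so the caller's handles may be destroyed):
.vb
   VecScatter in,out;
   ierr = VecScatterCreate(xglobal,is1,xred,is2,&in);CHKERRQ(ierr);
   ierr = VecScatterCreate(xred,is2,xglobal,is1,&out);CHKERRQ(ierr);
   ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
   ierr = VecScatterDestroy(in);CHKERRQ(ierr);
   ierr = VecScatterDestroy(out);CHKERRQ(ierr);
.ve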

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcomm have a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the sequential PC created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the sequential PC
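
   Sample usage (a sketch; configures the inner PC before the outer solver is set up;
   the choice of PCILU here is purely illustrative):
.vb
   PC innerpc;
   ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
   ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);
.ve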

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix
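
   Sample usage (a sketch; either output may be passed as PETSC_NULL if it is
   not needed, since both are checked before being set):
.vb
   Mat A;
   ierr = PCRedundantGetOperators(pc,&A,PETSC_NULL);CHKERRQ(ierr);
.ve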

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.
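
   Sample command line (a sketch; "./app" stands in for any program using KSP/PC):
.vb
   mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu
.ve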

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END