#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
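
/* Data flow during PCApply_Redundant (a sketch; names refer to the struct above):

     x  (on ((PetscObject)pc)->comm)
        --scatterin-->   xdup (on psubcomm->dupparent; holds nsubcomm stacked copies of x)
        --place array--> xsub (on psubcomm->comm; borrows xdup's local storage)
        --PCApply----->  ysub
        --place array--> ydup (borrows ysub's local storage)
        --scatterout-->  y  (on ((PetscObject)pc)->comm)
*/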

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscTruth     iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view the first (color=0) redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processors of each subcommunicator share a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) {
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the matrix is sequential (one process), there is no parallel matrix to gather */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processors of a subcommunicator */
    /*------------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
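
/* A worked example of the scatter construction above (illustrative only).
   Suppose m = 4 global rows, two processes each owning two rows, and
   nsubcomm = 2, so xdup has 2*m = 8 entries (two stacked copies of x).
   On the process owning rows 0 and 1 (mstart=0, mend=2):
     idx1 = {0,1, 0,1}   source rows of x, repeated once per subcommunicator
     idx2 = {0,1, 4,5}   destinations in xdup; copy k starts at offset k*m
   Thus scatterin duplicates x into every copy inside xdup, while scatterout
   (a stride IS starting at mstart + color*m) pulls this process's rows back
   out of its own subcommunicator's copy of the solution. */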

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  if (red->ksp) {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant pc %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
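
/* Usage sketch (illustrative; pc here is application code, not part of this file):
   request 4 redundant solves before the PC is set up, or equivalently pass
   -pc_redundant_number 4 in the options database:

     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
*/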

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
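
/* Usage sketch (illustrative; the index sets isin, isdup, isown, isloc are
   hypothetical and must be built to match the layout PCSetUp_Redundant expects,
   see the worked example above):

     ierr = VecScatterCreate(x,isin,xdup,isdup,&in);CHKERRQ(ierr);
     ierr = VecScatterCreate(xdup,isown,x,isloc,&out);CHKERRQ(ierr);
     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
*/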

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processors of each subcommunicator share a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; it runs on each
     subcommunicator (and is sequential for the default number of redundant solves).

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the inner PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
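
/* Usage sketch (illustrative): customize the inner solve, e.g. switch the
   default LU factorization to Cholesky for a symmetric positive definite matrix:

     ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);
*/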

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used on
     each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
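
/* Usage sketch (illustrative): after the PC has been set up, inspect the
   gathered matrix on its own (sub)communicator:

     ierr = PCRedundantGetOperators(pc,&mat,PETSC_NULL);CHKERRQ(ierr);
     ierr = MatView(mat,PETSC_VIEWER_STDOUT_(((PetscObject)mat)->comm));CHKERRQ(ierr);
*/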

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
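
/* A minimal end-to-end sketch (assumptions: standard PETSc usage of this era;
   A, b, x and the surrounding application code are not part of this file):

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     ierr = KSPDestroy(ksp);CHKERRQ(ierr);

   run, for example, as
     mpiexec -n 64 ./app -pc_redundant_number 4 -redundant_pc_type lu
*/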

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END