xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 5e71baeff2f3138f93cd4f5927dfd596eb8325cc)

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator that hold the parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that congregate xsub or ysub to facilitate vector scattering */
  Mat                pmats;                /* matrix (and optional preconditioner matrix) belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

/* Forward the factor shift type to the inner PC if it already exists; otherwise stash it until the inner KSP/PC is created */
PetscErrorCode  PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    ierr = KSPGetPC(red->ksp,&pc);CHKERRQ(ierr);
    ierr = PCFactorSetShiftType(pc,shifttype);CHKERRQ(ierr);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into processors of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack;
        ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        if (!foundpack) { /* reset default ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverPackage(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* The impl below works for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into processors of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
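
/*
   A minimal usage sketch for PCRedundantSetNumber(). The surrounding KSP setup and the value 4 are
   illustrative assumptions, not taken from this file, and error checking is omitted:

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);

   Here 4 requests four redundant subgroups, i.e. four copies of the solve. The same setting is
   available on the command line as -pc_redundant_number 4.
*/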

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);

  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP (with its PC) of which the processes in each subcommunicator hold a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    if (red->shifttypeset) {
      ierr = PCFactorSetShiftType(red->pc,red->shifttype);CHKERRQ(ierr);
      red->shifttypeset = PETSC_FALSE;
    }
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
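
/*
   A usage sketch for PCRedundantGetKSP(). This is a hypothetical snippet, assuming pc is already of
   type PCREDUNDANT; error checking is omitted:

     KSP innerksp;
     PC  innerpc;
     PCRedundantGetKSP(pc,&innerksp);
     KSPGetPC(innerksp,&innerpc);
     PCSetType(innerpc,PCCHOLESKY);

   This switches the inner factorization from the default LU to Cholesky. The same control is available
   on the command line through the appended prefix, e.g. -redundant_pc_type cholesky.
*/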

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
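
/*
   A usage sketch for PCRedundantGetOperators(). This is a hypothetical snippet, assuming pc is a
   set-up PCREDUNDANT; error checking is omitted:

     Mat Asub;
     PCRedundantGetOperators(pc,&Asub,NULL);
     MatView(Asub,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)Asub)));

   Asub is the redundant copy of the operator held on this process's subcommunicator; passing NULL
   skips the preconditioner matrix, which is the same object here anyway.
*/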

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set using -redundant_pc_xxx, and for the redundant KSP using -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
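
/*
   Example command-line usage for PCREDUNDANT (a sketch; the executable name and process counts are
   illustrative, not taken from this file):

     mpiexec -n 64 ./app -ksp_type cg -pc_type redundant -pc_redundant_number 4 \
         -redundant_ksp_type preonly -redundant_pc_type lu

   This runs 4 redundant direct solves, each on a subcommunicator of 16 = 64/4 processes.
*/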

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}