
/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that congregate xsub or ysub to facilitate vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

PetscErrorCode  PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    ierr = KSPGetPC(red->ksp,&pc);CHKERRQ(ierr);
    ierr = PCFactorSetShiftType(pc,shifttype);CHKERRQ(ierr);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}
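
/* A minimal caller-side sketch (not part of this implementation; the outer ksp is assumed to have
   been created and given operators by the caller):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCFactorSetShiftType(pc,MAT_SHIFT_NONZERO);CHKERRQ(ierr);

   If the inner KSP does not exist yet, the shift type is stored and applied once
   PCRedundantGetKSP_Redundant() creates it. */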

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack;
        ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        if (!foundpack) { /* reset default ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverType(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced())
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
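        /* Illustration (hypothetical sizes): with M = 4 global rows, n = 2 subcommunicators, and this
           rank owning rows 1..2 (mstart = 1, mend = 3), the loop above produces
             idx1 = {1,2, 1,2}   source indices in x, repeated once per subcommunicator
             idx2 = {1,2, 5,6}   destination indices in xdup, the k-th copy offset by k*M
           so scatterin sends every locally owned entry of x to each of the n concatenated copies in xdup. */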
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreateWithData(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreateWithData(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
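  /* Data flow for the parallel case: scatter the global x into xdup (one concatenated copy per
     subcommunicator), let each subcommunicator solve redundantly with red->ksp through xsub/ysub
     (which borrow the local arrays of xdup/ydup), then scatter this process's copy of the solution
     from ydup back into the global y. */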
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    ierr = KSPCheckSolve(red->ksp,pc,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    ierr = KSPCheckSolve(red->ksp,pc,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                              nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
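
/* Caller-side usage sketch (a sketch only; ksp is assumed to exist with operators already set):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);    4 redundant solves, e.g. 64 processes -> 4 solves on 16 each

   The same effect is available from the options database via -pc_redundant_number 4. */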

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);

  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
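
/* Caller-side usage sketch: custom scatters are only needed when the defaults built in
   PCSetUp_Redundant() are unsuitable. Here x (a global vector), xdup (the concatenated redundant
   vector) and the index sets isx, isdup, iscopy, isx2 are hypothetical caller-built objects,
   analogous to the is1/is2 pairs constructed in PCSetUp_Redundant():

     VecScatter in,out;
     ierr = VecScatterCreate(x,isx,xdup,isdup,&in);CHKERRQ(ierr);       x -> every redundant copy in xdup
     ierr = VecScatterCreate(xdup,iscopy,x,isx2,&out);CHKERRQ(ierr);    this rank's copy in xdup -> x
     ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
     ierr = VecScatterDestroy(&in);CHKERRQ(ierr);
     ierr = VecScatterDestroy(&out);CHKERRQ(ierr);
*/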

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcomm share a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    if (red->shifttypeset) {
      ierr = PCFactorSetShiftType(red->pc,red->shifttype);CHKERRQ(ierr);
      red->shifttypeset = PETSC_FALSE;
    }
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
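
/* Caller-side usage sketch: customize the inner solver programmatically instead of with -redundant_
   options (pc is assumed to already be of type PCREDUNDANT):

     KSP innerksp;
     PC  innerpc;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPSetType(innerksp,KSPGMRES);CHKERRQ(ierr);
     ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);
*/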

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used for the redundant solves

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
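
/* Caller-side usage sketch: inspect the redundant operator after PCSetUp() has been called; either
   output argument may be NULL:

     Mat      Ared;
     PetscInt m,n;
     ierr = PCRedundantGetOperators(pc,&Ared,NULL);CHKERRQ(ierr);
     ierr = MatGetSize(Ared,&m,&n);CHKERRQ(ierr);
*/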

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set using -redundant_pc_xxx; options for the redundant KSP can be set using -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes:
    The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type to the inner PC if it is factorization based.

   Developer Notes:
    Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
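
/* Command-line sketch (hypothetical executable ./ex; any KSP example applies): 2 redundant parallel
   LU solves, each on 4 of 8 processes, inside an outer GMRES iteration:

     mpiexec -n 8 ./ex -ksp_type gmres -pc_type redundant -pc_redundant_number 2 \
             -redundant_ksp_type preonly -redundant_pc_type lu
*/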

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}