
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each process */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each process group (subcommunicator) and back */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCFactorSetShiftType_Redundant"
PetscErrorCode  PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->ksp) {
    PC subpc;
    ierr = KSPGetPC(red->ksp,&subpc);CHKERRQ(ierr);
    ierr = PCFactorSetShiftType(subpc,shifttype);CHKERRQ(ierr);
  } else {
    /* inner KSP not created yet; defer to PCRedundantGetKSP_Redundant() */
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

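/*
  A minimal usage sketch (hypothetical application code, not part of this
  file): a shift type set on the outer PCREDUNDANT is forwarded to the inner
  factorization PC, even when set before the inner KSP exists.

    KSP ksp;
    PC  pc;
    KSPCreate(PETSC_COMM_WORLD,&ksp);
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCREDUNDANT);
    PCFactorSetShiftType(pc,MAT_SHIFT_NONZERO);   stored until the inner PC is created
*/
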
#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm and a new ksp and pc over the subcommunicator */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack;
        ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        if (!foundpack) { /* no parallel LU is available; reset the default ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverPackage(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        /* xdup is a concatenation of n copies of x; each process sends its owned
           entries i of x to position i + M*k of copy k, for k = 0,...,n-1 */
        ierr = MatCreateVecs(pc->pmat,&x,NULL);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* The implementation below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

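/*
  An illustrative (hypothetical) layout for the scatters built above: with 4
  processes, n = 2 subcommunicators, and global size M = 8, each process owns
  2 entries of x. On rank 1 (mstart = 2, mend = 4) the forward map becomes

    idx1 = {2,3,2,3}        entries of x, sent once per redundant copy
    idx2 = {2,3,10,11}      copy 0 occupies [0,8) of xdup, copy 1 occupies [8,16)

  so xdup holds two full copies of x, one per subcommunicator. The return
  scatter pulls each process's own rows mstart..mend-1 back out of the copy
  selected by this process's subcommunicator color.
*/
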
#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each process */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

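/*
  The solve above relies on the zero-copy VecPlaceArray()/VecResetArray()
  idiom. A minimal standalone sketch (the names a and b are illustrative;
  both vectors must have the same local size):

    PetscScalar *array;
    VecGetArray(a,&array);        borrow a's local storage
    VecPlaceArray(b,array);       b now aliases that storage; nothing is copied
    ... work with b ...
    VecResetArray(b);             give b back its own storage
    VecRestoreArray(a,&array);    return the borrowed pointer
*/
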
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_Redundant"
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply transposed preconditioner on each process */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant solves","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

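/*
  A hedged usage sketch (application code, not part of this file): on 64
  processes, request 4 redundant solves so that each runs on 16 processes;
  the name ksp is assumed to come from the caller.

    PC pc;
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCREDUNDANT);
    PCRedundantSetNumber(pc,4);     equivalent to -pc_redundant_number 4
*/
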
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* reference the new scatters before destroying the old ones, in case they are the same objects */
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

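/*
  For orientation, a hedged sketch of basic VecScatter mechanics (generic
  PETSc usage, not the exact scatters PCSetUp_Redundant() builds): in the
  fully redundant limit every process needs all of x, which is the
  all-gather scatter PETSc provides directly.

    Vec        xall;
    VecScatter togather;
    VecScatterCreateToAll(x,&togather,&xall);
    VecScatterBegin(togather,x,xall,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(togather,x,xall,INSERT_VALUES,SCATTER_FORWARD);

  The same context applied with SCATTER_REVERSE moves values back toward x;
  PCSetUp_Redundant() instead builds a dedicated scatterout for that step.
*/
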
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    if (red->shifttypeset) { /* apply a shift type deferred by PCFactorSetShiftType_Redundant() */
      ierr = PCFactorSetShiftType(red->pc,red->shifttype);CHKERRQ(ierr);
      red->shifttypeset = PETSC_FALSE;
    }
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on the smaller set of processes of each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

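/*
  A hedged usage sketch (application code): retrieve the inner KSP and
  customize it before the solve; pc is assumed to already have type
  PCREDUNDANT.

    KSP inner;
    PCRedundantGetKSP(pc,&inner);
    KSPSetType(inner,KSPGMRES);
    KSPSetTolerances(inner,1.e-12,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
*/
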
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the redundant (subcommunicator) solve

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

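/*
  A hedged usage sketch (application code): after KSPSetUp()/PCSetUp() the
  redundant operator can be inspected on its own subcommunicator.

    Mat A;
    PCRedundantGetOperators(pc,&A,NULL);
    if (A) {MatView(A,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)A)));}
*/
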
/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with the preconditioner for the entire problem on each subgroup of processes

     Options for the redundant preconditioner can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

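/*
  An example command line (hypothetical application ./app): 4 redundant LU
  solves on 64 processes, each factorization running on 16 processes.

    mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 \
        -redundant_ksp_type preonly -redundant_pc_type lu
*/
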
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;        /* default: fully redundant, one (sequential) solve per process */
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}