xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 7e57d48a51ee858b36449453daefa9e5ed052d62)

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
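
/*
   A sketch of how the fields above cooperate in PCApply_Redundant() (this
   summarizes the code below, it adds no new behavior): the global vector x is
   replicated into the duplicate vector xdup, whose local array is aliased by
   the subcommunicator vector xsub; each subcommunicator solves redundantly,
   and the copy owned by this process's subcommunicator travels back:

       x --scatterin--> xdup ==(VecPlaceArray)== xsub
                                                  |
                                         KSPSolve on subcomm
                                                  |
       y <--scatterout-- ydup ==(VecPlaceArray)== ysub
*/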

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the matrix set by the user is sequential (one process) then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
      ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP (and PC) that the processors in each subcomm have a copy of */
      subcomm = red->psubcomm->comm;

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processors of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,NULL);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* The implementation below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processors of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
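
/*
   A worked example of the index maps built above (illustrative only, assuming
   a hypothetical run): global size M = 8 on 4 processes with nsubcomm = 2, so
   each process owns 2 entries (mstart = 2*rank). On the process with
   mstart = 2, mend = 4, the loops produce

     idx1 = {2,3,2,3}      (global entries, repeated once per subcommunicator)
     idx2 = {2,3,10,11}    (copy k lives at offset M*k = 8*k in xdup)

   so scatterin replicates every global entry into each of the two copies, and
   scatterout extracts the copy at offset color*M back into the global vector.
*/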

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_Redundant"
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner transpose on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant solves","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, with 64 MPI processes and
                an nredundant of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
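
/*
   A minimal usage sketch (not part of this file; 'ksp' is a hypothetical,
   already-created KSP): select PCREDUNDANT and request 4 redundant solves,
   equivalent to the options -pc_type redundant -pc_redundant_number 4.

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
*/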

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
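
/*
   A usage sketch ('scatin' and 'scatout' are hypothetical scatters built by
   the caller): the scatters must map the global vector into the concatenated
   duplicate layout used above (copy k offset by k*M) and back. Normally
   PCSetUp_Redundant() builds these automatically; supply your own only when a
   nonstandard layout is required.

     ierr = PCRedundantSetScatter(pc,scatin,scatout);CHKERRQ(ierr);
*/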

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP (and PC) that the processors in each subcomm have a copy of */
    subcomm = red->psubcomm->comm;

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
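
/*
   A minimal sketch (assuming 'pc' is already of type PCREDUNDANT): fetch the
   inner KSP and customize the redundant solve programmatically, equivalent to
   -redundant_ksp_type gmres -redundant_ksp_rtol 1e-10 on the command line.

     KSP innerksp;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPSetType(innerksp,KSPGMRES);CHKERRQ(ierr);
     ierr = KSPSetTolerances(innerksp,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
*/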

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the redundant solve on each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
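
/*
   A minimal sketch (assuming PCSetUp() has already run on a PCREDUNDANT pc):
   retrieve the redundant operator and inspect it on its subcommunicator.
   Passing NULL skips the output you do not need.

     Mat pmat;
     ierr = PCRedundantGetOperators(pc,NULL,&pmat);CHKERRQ(ierr);
     ierr = MatView(pmat,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pmat)));CHKERRQ(ierr);
*/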

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, with 64 MPI processes and
                              an n of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
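
/*
   A command-line sketch (hypothetical executable './app'): run 4 redundant LU
   solves, each on 16 of 64 processes, as described in the manual page above.

     mpiexec -n 64 ./app -ksp_type gmres -pc_type redundant \
       -pc_redundant_number 4 -redundant_ksp_type preonly -redundant_pc_type lu
*/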

#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}