/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that concatenate xsub or ysub to facilitate vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators (see the PetscSubcomm data structure) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
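
/*
   A sketch of how one application of the preconditioner moves data (the names
   refer to the struct fields above; see PCApply_Redundant() for the code):

     x (global)  --scatterin-->  xdup  --VecPlaceArray-->  xsub (subcomm)
     KSPSolve(ksp,xsub,ysub)                  // redundant solve in each subgroup
     ysub  --VecPlaceArray-->  ydup  --scatterout-->  y (global)
*/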

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    ierr = KSPGetPC(red->ksp,&pc);CHKERRQ(ierr);
    ierr = PCFactorSetShiftType(pc,shifttype);CHKERRQ(ierr);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}
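
/*
   Usage sketch (hypothetical caller code, not part of this file): if the inner
   KSP already exists the shift type is forwarded to its PC immediately;
   otherwise it is cached and applied when PCRedundantGetKSP_Redundant()
   creates the inner KSP/PC.

     PCSetType(pc,PCREDUNDANT);
     PCFactorSetShiftType(pc,MAT_SHIFT_NONZERO);  // cached; applied to the inner PCLU later
*/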

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm and a new ksp and pc over the subcommunicator */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack;
        ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        if (!foundpack) { /* no parallel LU factorization available; reset to default iterative ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverType(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has an empty local array because ysub's array will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,NULL);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* The implementation below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"number of redundant PCs %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
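
/*
   Usage sketch (hypothetical caller code): with 64 MPI processes the call
   below yields 4 subcommunicators, each performing the full solve on 16
   processes.

     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);      // equivalent to -pc_redundant_number 4
*/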

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
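
/*
   Usage sketch (hypothetical; is1..is4 stand for index sets built by the
   caller). PCSetUp_Redundant() normally builds both scatters itself, so this
   is only needed for custom layouts:

     VecScatter in,out;
     VecScatterCreate(x,is1,xdup,is2,&in);    // global vector -> concatenated redundant vector
     VecScatterCreate(xdup,is3,x,is4,&out);   // redundant solution -> global vector
     PCRedundantSetScatter(pc,in,out);        // the PC takes its own references
     VecScatterDestroy(&in);
     VecScatterDestroy(&out);
*/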

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    if (red->shifttypeset) {
      ierr = PCFactorSetShiftType(red->pc,red->shifttype);CHKERRQ(ierr);
      red->shifttypeset = PETSC_FALSE;
    }
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC that runs on the smaller set of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
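
/*
   Usage sketch (hypothetical caller code): the first call creates the
   subcommunicators and the inner KSP, so it can be used to customize the
   redundant solver before PCSetUp() runs.

     KSP innerksp;
     PCRedundantGetKSP(pc,&innerksp);
     KSPSetType(innerksp,KSPGMRES);   // override the default KSPPREONLY + PCLU
*/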

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used by the redundant solver on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
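
/*
   Usage sketch (hypothetical caller code): in this implementation both outputs
   alias red->pmats, the matrix gathered onto the subcommunicator, and either
   argument may be NULL.

     Mat pmat;
     PCRedundantGetOperators(pc,NULL,&pmat);
*/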

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
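
/*
   Usage sketch (hypothetical command line): with 64 MPI processes this runs
   4 redundant solves, each a GMRES/block-Jacobi solve on 16 processes.

     mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 \
                         -redundant_ksp_type gmres -redundant_pc_type bjacobi
*/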

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}