/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that concatenate xsub or ysub, facilitating vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    ierr = KSPGetPC(red->ksp,&pc);CHKERRQ(ierr);
    ierr = PCFactorSetShiftType(pc,shifttype);CHKERRQ(ierr);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the matrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack;
        ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        if (!foundpack) { /* no parallel LU available; reset the default ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverType(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
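        /* each process sends its owned entries [mstart,mend) of x to every one of the n
           redundant copies; copy k occupies the index range [k*M,(k+1)*M) of xdup */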
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* the impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    ierr = KSPCheckSolve(red->ksp,pc,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    ierr = KSPCheckSolve(red->ksp,pc,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
                use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.

   Level: advanced

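   Example Usage:
   A minimal sketch; the KSP object ksp and the process counts are illustrative, not part of this routine:
.vb
      PC pc;
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCREDUNDANT);
      PCRedundantSetNumber(pc,4);  /* on 64 processes: 4 redundant solves, each on 16 processes */
.ve
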
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

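   Example Usage:
   A hedged sketch; constructing suitable scatters is application specific and the
   VecScatterCreate() arguments are omitted here:
.vb
      VecScatter in,out;
      /* ... build in and out with VecScatterCreate() ... */
      PCRedundantSetScatter(pc,in,out);
      VecScatterDestroy(&in);   /* the PC keeps its own references */
      VecScatterDestroy(&out);
.ve
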
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP and PC of which the processes in each subcommunicator have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    if (red->shifttypeset) {
      ierr = PCFactorSetShiftType(red->pc,red->shifttype);CHKERRQ(ierr);
      red->shifttypeset = PETSC_FALSE;
    }
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

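   Example Usage:
   A minimal sketch for customizing the inner solver; the first call creates the
   subcommunicator objects if needed, and the choice of PCILU is illustrative:
.vb
      KSP innerksp;
      PC  innerpc;
      PCRedundantGetKSP(pc,&innerksp);
      KSPGetPC(innerksp,&innerpc);
      PCSetType(innerpc,PCILU);  /* replace the default LU on each subcommunicator */
.ve
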
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the subcommunicator solves

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

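   Example Usage:
   A sketch for inspecting the redundant matrix after PCSetUp() has been called
   (the viewer choice is illustrative):
.vb
      Mat mat;
      PCRedundantGetOperators(pc,&mat,NULL);
      MatView(mat,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)mat)));
.ve
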
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
                              use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.

   Level: intermediate

   Notes:
    The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

   Developer Notes:
    Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

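   Example Usage:
   A typical command line (the executable name and process counts are illustrative); each of
   the 4 groups of 16 processes factors and solves the full system:
.vb
      mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu
.ve
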
.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}