
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each process */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each process group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
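
/* Example (a hedged sketch, not from this file): selecting this preconditioner from
   application code; ksp is assumed to already exist on a parallel communicator.

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);    (four redundant subgroups)

   or, equivalently, from the command line: -pc_type redundant -pc_redundant_number 4 */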

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, and a new ksp and pc over the subcomm */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack;
        ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        if (!foundpack) { /* reset default ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverPackage(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
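        /* Build the gather scatter: each of the psubcomm->n subgroups needs its own full
           copy of x, so idx1 repeats this process's owned entries [mstart,mend) once per
           copy, while idx2 shifts copy k by k*M, the start of the k-th block of xdup */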
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

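  /* Data flow: gather the full x into the concatenated vector xdup, alias the local
     block of xdup as the subcommunicator vector xsub, solve redundantly within each
     subgroup, then scatter this subgroup's portion of the solution back into y */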
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each process */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_Redundant"
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply transposed preconditioner on each process */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                set nredundant to 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
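
/* Example (a hedged sketch, not from this file): with 64 MPI processes, requesting
   4 redundant contexts gives 4 subgroups of 16 processes, each solving the entire
   problem; call this before the preconditioner is set up.

     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);
*/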

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the
     global vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter that moves the values in
-  out - the scatter that moves them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
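
/* Example (a hedged sketch, not from this file): supplying custom scatters; x, xdup,
   is_in, and is_out are illustrative placeholders the caller is assumed to have built.

     VecScatter in,out;
     VecScatterCreate(x,is_in,xdup,NULL,&in);
     VecScatterCreate(xdup,is_out,x,NULL,&out);
     PCRedundantSetScatter(pc,in,out);
     VecScatterDestroy(&in);    (the PC keeps its own references)
     VecScatterDestroy(&out);
*/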

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP (with its PC) of which the processes in each subcomm have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on the smaller (subcommunicator) set of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
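
/* Example (a hedged sketch, not from this file): customizing the inner solver, e.g.
   replacing the default preonly+LU with GMRES and block Jacobi on each subgroup.

     KSP inner;
     PC  innerpc;
     PCRedundantGetKSP(pc,&inner);
     KSPSetType(inner,KSPGMRES);
     KSPGetPC(inner,&innerpc);
     PCSetType(innerpc,PCBJACOBI);
*/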

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
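
/* Example (a hedged sketch, not from this file): retrieving the subcommunicator
   operators after PCSetUp(); both outputs currently alias the same matrix.

     Mat A,P;
     PCRedundantGetOperators(pc,&A,&P);
*/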

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioner can be set with -redundant_pc_xxx, and options for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              set n to 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
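
/* Example (a hedged sketch, not from this file): run the inner redundant solves with
   GMRES + block Jacobi instead of the default preonly + LU, via the "redundant_" prefix:

     -pc_type redundant -pc_redundant_number 2 -redundant_ksp_type gmres -redundant_pc_type bjacobi
*/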

#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}