/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;
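
/*
  Data flow for one application of the preconditioner in the parallel-matrix case
  (a summary of PCApply_Redundant() below):

     x (global) --scatterin--> xdup --VecPlaceArray--> xsub --KSPSolve--> ysub
     y (global) <--scatterout-- ydup <----------VecPlaceArray-------------+

  xdup lives on the contiguous parent communicator and concatenates the nsubcomm
  redundant copies of x; xsub is a subcommunicator-sized view of this process's
  slice of xdup, so no copy is made between xdup and xsub.
*/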

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
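
/*
  A worked example of the scatter index construction above (illustrative only):
  with global size M = 4, two ranks each owning two entries, and n = 2 redundant
  copies, rank 0 (mstart = 0, mend = 2) builds

      idx1 = {0,1, 0,1}        (each locally owned entry, once per copy)
      idx2 = {0,1, 4,5}        (copy k is shifted by M*k = 4k within xdup)

  so scatterin deposits a full copy of x into each length-M block of xdup, and
  scatterout pulls this rank's entries back out of the block belonging to its
  subcommunicator (offset color*M).
*/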

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
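
/*
  The VecPlaceArray()/VecResetArray() calls above implement a zero-copy handoff:
  xsub temporarily borrows xdup's local memory instead of copying it. A minimal
  standalone sketch of the pattern (the names donor/borrower are illustrative,
  not part of this file):

    PetscScalar *a;
    ierr = VecGetArray(donor,&a);CHKERRQ(ierr);       // borrow donor's storage
    ierr = VecPlaceArray(borrower,a);CHKERRQ(ierr);   // borrower now aliases it
    // ... use borrower ...
    ierr = VecResetArray(borrower);CHKERRQ(ierr);     // restore borrower's own array
    ierr = VecRestoreArray(donor,&a);CHKERRQ(ierr);

  The two vectors must have the same local length.
*/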

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_Redundant"
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant preconditioner contexts","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
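
/*
  A typical usage sketch (assuming ksp is an existing KSP on 64 processes):

    PC pc;
    ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
    ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
    ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);  // 4 solves, 16 processes each

  or, equivalently, on the command line: -pc_type redundant -pc_redundant_number 4
*/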

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
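
/*
  Most users rely on the scatters built automatically in PCSetUp_Redundant();
  this routine exists for custom layouts. A compressed sketch of what a caller
  would provide, mirroring the default construction above (x, xdup and the
  index sets is1..is4 are the caller's own objects, shown only for shape):

    VecScatter in,out;
    ierr = VecScatterCreate(x,is1,xdup,is2,&in);CHKERRQ(ierr);   // global -> duplicated
    ierr = VecScatterCreate(xdup,is3,x,is4,&out);CHKERRQ(ierr);  // duplicated -> global
    ierr = PCRedundantSetScatter(pc,in,out);CHKERRQ(ierr);
*/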

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP (and PC) of which the processes in each subcomm have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
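
/*
  A sketch of customizing the inner solver in code (the same settings are
  reachable on the command line through the redundant_ options prefix):

    KSP innerksp;
    PC  innerpc;
    ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
    ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
    ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);  // replace the default LU
*/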

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on a subcommunicator by the redundant solver

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
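
/*
  Example: inspect the redundant operator after PCSetUp() has created it
  (red->pmats is NULL before setup, so both outputs would be NULL earlier):

    Mat A;
    ierr = PCRedundantGetOperators(pc,&A,NULL);CHKERRQ(ierr);
    ierr = MatView(A,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)A)));CHKERRQ(ierr);
*/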

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioners can be set with -redundant_pc_xxx; options for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
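
/*
   Example command line (a sketch; ./app stands for any PETSc application, here run
   on 64 processes so that there are 4 redundant solves on 16 processes each; a
   parallel LU on a multi-process subcommunicator requires an external package
   such as MUMPS or SuperLU_DIST):

     mpiexec -n 64 ./app -ksp_type gmres -pc_type redundant -pc_redundant_number 4 \
             -redundant_ksp_type preonly -redundant_pc_type lu
*/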

#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}