
/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used in each subcommunicator */
  Vec          xsub,ysub;            /* vectors of a subcommunicator that hold the values of parallel vectors on ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that aggregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;

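/*
   Overview (a descriptive sketch of the routines below, added for clarity):
   the global vector is scattered so that every subcommunicator receives a full
   copy of it (xdup holds nsubcomm stacked copies), each subcommunicator solves
   the entire problem redundantly with its own KSP, and one copy of the result
   is scattered back to the global layout.  With the interlaced subcommunicator
   type used in PCSetUp_Redundant(), e.g. 4 processes and nsubcomm = 2, the
   subgroups are ranks {0,2} and {1,3}.
*/
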
#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP of which the processes in each subcommunicator have a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the duplicated parent communicator red->psubcomm->dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

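      /* Descriptive note (added for clarity): idx1 lists this process's global
         indices in the original layout once per subcommunicator, while idx2
         lists the corresponding locations in xdup, where copy k of the global
         vector occupies entries [k*m, (k+1)*m). */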
      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the matrix given by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the KSP of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

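/*
   PCApply_Redundant moves the data as follows (a sketch of the code below):
   the input x is scattered into xdup so that every subcommunicator gets a full
   copy, xdup's local array is lent to xsub so the subcommunicator's KSP can
   solve into ysub, then ysub's local array is lent to ydup and one redundant
   copy is scattered back into y.  The VecPlaceArray()/VecResetArray() pairs
   avoid copying data between the vectors of the two communicators.
*/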
#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply the preconditioner within each subcommunicator */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  if (red->ksp) {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                set nredundant to 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant PCs %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

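/*
   Example usage (a minimal sketch; "ksp" is a hypothetical, already-created
   KSP and error checking is abbreviated):

      PC pc;
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
      ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);

   or, equivalently, on the command line: -pc_type redundant -pc_redundant_number 4
*/
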
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin  = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

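/*
   Note (added for clarity): custom scatters must match the layouts created in
   PCSetUp_Redundant(): "in" scatters the global vector into a vector holding
   one full copy per subcommunicator, and "out" extracts this subcommunicator's
   copy back into the global layout.  PCSetUp_Redundant() builds suitable
   defaults, so most users never need to call PCRedundantSetScatter().
*/
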
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP of which the processes in each subcommunicator have a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC on the smaller set of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

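/*
   Example usage (a minimal sketch; "pc" is a hypothetical PC already set to
   PCREDUNDANT and error checking is abbreviated):

      KSP innerksp;
      PC  innerpc;
      ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
      ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
      ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);

   The same effect is available from the command line through the "redundant_"
   options prefix, e.g. -redundant_pc_type ilu.
*/
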
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

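/*
   Example usage (a minimal sketch; "pc" is a hypothetical PC of type
   PCREDUNDANT, and PCSetUp() must already have gathered the redundant matrix,
   otherwise the returned matrices are null):

      Mat      submat;
      PetscInt m,n;
      ierr = PCRedundantGetOperators(pc,&submat,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetSize(submat,&m,&n);CHKERRQ(ierr);
*/
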
/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on each subgroup of processes

     Options for the redundant preconditioner can be set with -redundant_pc_xxx and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              set n to 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

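/*
   Example invocation (hypothetical executable name "ex1"; any application
   built on KSP/PC works the same way):

      mpiexec -n 64 ./ex1 -pc_type redundant -pc_redundant_number 4 \
              -redundant_ksp_type preonly -redundant_pc_type lu

   This runs 4 redundant LU solves, each on a subgroup of 16 processes.
*/
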
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode  PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                    PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
503