#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  MPI_Comm   parent;      /* parent communicator */
  MPI_Comm   dupparent;   /* duplicate parent communicator, under which the processors of this subcomm have contiguous ranks */
  MPI_Comm   comm;        /* this communicator */
  PetscInt   n;           /* number of subcommunicators under the parent communicator */
  PetscInt   color;       /* color of the processors belonging to this communicator */
} PetscSubcomm;

#undef __FUNCT__
#define __FUNCT__ "PetscSubcommDestroy"
PetscErrorCode PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(psubcomm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------
    To avoid scattering data from the subcomms back to the original comm, we create each subcomm by
     assigning processes to it round-robin, i.e., color = rank % nsubcomm.
     An example: size=4, nsubcomm=3
      pc->comm:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2 and owns processes [0] and [3],
           subcomm[color = 1] has subsize=1 and owns process [1],
           subcomm[color = 2] has subsize=1 and owns process [2];
          dupcomm has the same number of processes as pc->comm, and its duprank maps
          the processes of the subcomms contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
     ----------------------------------------------------------------------------------------*/
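
/*
   A minimal illustrative sketch (not used by this file) of the mapping computed in
   PetscSubcommCreate() below, reproducing the size=4, nsubcomm=3 example above:

     PetscMPIInt rank,size = 4;
     PetscInt    nsubcomm = 3,subsize[3] = {2,1,1},color,subrank,duprank,i;
     for (rank=0; rank<size; rank++) {
       color   = rank%nsubcomm;
       subrank = rank/nsubcomm;
       duprank = subrank;
       for (i=0; i<color; i++) duprank += subsize[i];
     }

   This yields (rank,color,subrank,duprank) = (0,0,0,0), (1,1,0,2), (2,2,0,3), (3,0,1,1),
   matching the tables above.
*/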
#undef __FUNCT__
#define __FUNCT__ "PetscSubcommCreate"
PetscErrorCode PetscSubcommCreate(MPI_Comm comm,PetscInt nsubcomm,PetscSubcomm **psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscInt       np_subcomm,nleftover,i,j,color;
  MPI_Comm       subcomm=0,dupcomm=0;
  PetscSubcomm   *psubcomm_tmp;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (nsubcomm < 1 || nsubcomm > size) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %D cannot be < 1 or > input comm size %D",nsubcomm,size);

  /* get the size of each subcommunicator */
  ierr = PetscMalloc((1+nsubcomm)*sizeof(PetscMPIInt),&subsize);CHKERRQ(ierr);
  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++){
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* find the color of this process */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++){
    if (j == color){
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  ierr = PetscFree(subsize);CHKERRQ(ierr);

  ierr = PetscNew(PetscSubcomm,&psubcomm_tmp);CHKERRQ(ierr);
  psubcomm_tmp->parent    = comm;
  psubcomm_tmp->dupparent = dupcomm;
  psubcomm_tmp->comm      = subcomm;
  psubcomm_tmp->n         = nsubcomm;
  psubcomm_tmp->color     = color;
  *psubcomm = psubcomm_tmp;
  PetscFunctionReturn(0);
}
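
/*
   A hedged usage sketch (illustrative only): split PETSC_COMM_WORLD into two subcommunicators
   and release the result.

     PetscSubcomm   *psubcomm;
     PetscErrorCode ierr;
     ierr = PetscSubcommCreate(PETSC_COMM_WORLD,2,&psubcomm);CHKERRQ(ierr);
     ... use psubcomm->comm for the subgroup and psubcomm->dupparent for the contiguous ranks ...
     ierr = PetscSubcommDestroy(psubcomm);CHKERRQ(ierr);

   Note that PetscSubcommDestroy() frees only the struct; the communicators created by
   MPI_Comm_split() are not freed by it.
*/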

typedef struct {
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator that hold the parallel vectors of pc->comm */
  Vec          xdup,ydup;            /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;
  PetscSubcomm *psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer,subviewer;
  PetscInt       color = red->psubcomm->color;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Redundant solver preconditioner: First PC (color=0) follows\n");CHKERRQ(ierr);
    ierr = PetscViewerGetSubcomm(viewer,red->pc->comm,&subviewer);CHKERRQ(ierr);
    if (!color) { /* only view the first redundant pc */
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PCView(red->pc,subviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubcomm(viewer,red->pc->comm,&subviewer);CHKERRQ(ierr);
  } else if (isstring) { /* not tested yet! */
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PCView(red->pc,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#include "include/private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = pc->comm;
  Vec            vec;
  PetscInt       mlocal_sub;
  PetscMPIInt    subsize,subrank;
  PetscInt       rstart_sub,rend_sub,mloc_sub;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    MPI_Comm subcomm;
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC of which the processes of each subcomm share a copy */
    subcomm = red->psubcomm->comm;
    ierr = PCCreate(subcomm,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = PCSetOptionsPrefix(red->pc,prefix);CHKERRQ(ierr);
    ierr = PCAppendOptionsPrefix(red->pc,"redundant_");CHKERRQ(ierr);
    ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);

    /* create the working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get the local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    rstart_sub = pc->pmat->rmap.range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap.range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not pc->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create the vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;
      ierr = PetscMalloc(2*red->psubcomm->n*mlocal*sizeof(PetscInt),&idx1);CHKERRQ(ierr);
      idx2 = idx1 + red->psubcomm->n*mlocal;
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
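      /* idx1 lists each locally owned entry [mstart,mend) of vec once per redundant copy;
         idx2 gives its destination in xdup, whose global size is n*m with copy k occupying
         the index block [k*m,(k+1)*m) in the duprank ordering described above */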
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree(idx1);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the pmatrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy the old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);

    /* tell the PC of the subcommunicator its operators */
    ierr = PCSetOperators(red->pc,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = PCSetOperators(red->pc,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);
  ierr = PCSetUp(red->pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(x,red->xdup,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterEnd(x,red->xdup,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply the preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->ydup,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->ydup,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  /* guard against PCSetUp() never having been called, in which case these are still null */
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->pc)       {ierr = PCDestroy(red->pc);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number_comm","Number of subcommunicators","PCRedundantSetNumComm",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  red->scatterin  = in;
  red->scatterout = out;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
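
/*
   A hedged usage sketch (illustrative only; the scatters scatin and scatout are assumed
   to have been created elsewhere, e.g. with VecScatterCreate()):

     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetScatter(pc,scatin,scatout);CHKERRQ(ierr);

   Supplying the scatters up front makes PCSetUp_Redundant() skip building its own.
*/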

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  *innerpc = red->pc;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; it lives on each
     subcommunicator (sequential when each subcommunicator has one process) and is
     created during PCSetUp(), so it is only available afterwards.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the PC used on each subcommunicator

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
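
/*
   A hedged usage sketch (illustrative only): retrieve the inner PC and change its type
   from the default PCLU.

     PC innerpc;
     ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);
*/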

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on
     each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
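
/*
   A hedged usage sketch (illustrative only). Either output may be PETSC_NULL; note that
   this implementation currently returns the same redundant matrix for both mat and pmat.

     Mat mat,pmat;
     ierr = PCRedundantGetOperators(pc,&mat,&pmat);CHKERRQ(ierr);
*/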

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with the -redundant_ prefix, e.g. -redundant_pc_type

  Options Database:
.  -pc_redundant_number_comm - number of subcommunicators to use

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators()
M*/
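
/*
   A hedged command-line sketch (illustrative only; the executable name is hypothetical):
   solve redundantly with LU on two subcommunicators of a four-process run:

     mpiexec -n 4 ./app -pc_type redundant -pc_redundant_number_comm 2 -redundant_pc_type lu
*/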

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNew(PC_Redundant,&red);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(pc,sizeof(PC_Redundant));CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->view            = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END