xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 90e035378b7ae2ceaeb17618b77f7bb7d863b669)
1 
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
8 static PetscBool  cited = PETSC_FALSE;
9 static const char citation[] =
10 "@inproceedings{MaySananRuppKnepleySmith2016,\n"
11 "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
12 "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
13 "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
14 "  series    = {PASC '16},\n"
15 "  isbn      = {978-1-4503-4126-4},\n"
16 "  location  = {Lausanne, Switzerland},\n"
17 "  pages     = {5:1--5:12},\n"
18 "  articleno = {5},\n"
19 "  numpages  = {12},\n"
20 "  url       = {http://doi.acm.org/10.1145/2929908.2929913},\n"
21 "  doi       = {10.1145/2929908.2929913},\n"
22 "  acmid     = {2929913},\n"
23 "  publisher = {ACM},\n"
24 "  address   = {New York, NY, USA},\n"
25 "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
26 "  year      = {2016}\n"
27 "}\n";
28 
29 /*
30  PCTelescopeSetUp_default()
31  PCTelescopeMatCreate_default()
32 
33  default
34 
35  // scatter in
36  x(comm) -> xtmp(comm)
37 
38  xred(subcomm) <- xtmp
39  yred(subcomm)
40 
41  yred(subcomm) --> xtmp
42 
43  // scatter out
44  xtmp(comm) -> y(comm)
45 */
46 
47 PetscBool isActiveRank(PetscSubcomm scomm)
48 {
49   if (scomm->color == 0) { return PETSC_TRUE; }
50   else { return PETSC_FALSE; }
51 }
52 
53 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
54 {
55   DM subdm = NULL;
56 
57   if (!isActiveRank(sred->psubcomm)) { subdm = NULL; }
58   else {
59     switch (sred->sr_type) {
60     case TELESCOPE_DEFAULT: subdm = NULL;
61       break;
62     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
63       break;
64     case TELESCOPE_DMPLEX:  subdm = NULL;
65       break;
66     }
67   }
68   return(subdm);
69 }
70 
/*
 PCTelescopeSetUp_default - Builds the redundant vectors (xred/yred on the
 sub-communicator), the parent-communicator work vector xtmp, and the scatter
 used to move entries between the two communicators.

 Only ranks active on the sub-communicator (color == 0) allocate xred/yred;
 inactive ranks contribute zero local entries to xtmp, so the scatter moves
 nothing to them.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* Global size / block size come from the preconditioning operator B */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: rhs/solution vector on the sub-communicator; active ranks only.
     m is this rank's local share of xred (0 on inactive ranks). */
  xred = NULL;
  m    = 0;
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: parent-comm vector whose local layout matches xred on active ranks
     (0 local entries elsewhere), so entries can later be copied array-to-array */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* isin: global indices of x this rank will receive via the scatter */
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    /* inactive ranks receive nothing (zero-length stride) */
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreateWithData(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* stash everything on the PC context; x was only needed as a layout template */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
/*
 PCTelescopeMatCreate_default - Creates (or updates, per 'reuse') the redundant
 operator *A on the sub-communicator from the parent operator B.

 Every rank extracts its rows of B (all columns) as a sequential matrix; active
 sub-communicator ranks then concatenate those pieces into the parallel Bred.
 Inactive ranks return *A = NULL.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* rows this rank contributes (IS built during setup); all nc columns */
  isrow = sred->isin;
  ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr);
  /* NOTE(review): the submatrix is always extracted fresh (MAT_INITIAL_MATRIX)
     even when reuse != MAT_INITIAL_MATRIX; only the concatenation honours 'reuse' */
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
165 
/*
 PCTelescopeSubNullSpaceCreate_Telescope - Clones a (near) nullspace from the
 parent communicator onto the sub-communicator by scattering each basis vector
 through sred->scatter and copying it into duplicates of sred->xred.

 Collective on the parent communicator (all ranks participate in the scatters);
 only active ranks build sub_vecs and the resulting *sub_nullspace.
 Custom remove callbacks cannot be propagated and raise an error.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode   ierr;
  PetscBool        has_const;
  const Vec        *vecs;
  Vec              *sub_vecs = NULL;
  PetscInt         i,k,n = 0;
  MPI_Comm         subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* active ranks allocate the sub-communicator copies of the basis vectors */
  if (isActiveRank(sred->psubcomm)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; local layouts of xtmp and sub_vecs[k]
         match by construction (see PCTelescopeSetUp_default) */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (isActiveRank(sred->psubcomm)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
218 
/*
 PCTelescopeMatNullSpaceCreate_default - Propagates the nullspace and near
 nullspace (if attached to the parent operator B) onto the redundant operator
 sub_mat living on the sub-communicator.
*/
static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
{
  PetscErrorCode   ierr;
  Mat              B;

  PetscFunctionBegin;
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);

  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace,sub_nullspace;
    ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
    if (nullspace) {
      ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
      /* collective on the parent comm; sub_nullspace valid on active ranks only */
      ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
      if (isActiveRank(sred->psubcomm)) {
        /* sub_mat keeps its own reference; drop ours */
        ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
        ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
      }
    }
  }

  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
    if (nearnullspace) {
      ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
      ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
      if (isActiveRank(sred->psubcomm)) {
        ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
        ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
256 
257 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
258 {
259   PC_Telescope     sred = (PC_Telescope)pc->data;
260   PetscErrorCode   ierr;
261   PetscBool        iascii,isstring;
262   PetscViewer      subviewer;
263 
264   PetscFunctionBegin;
265   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
266   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
267   if (iascii) {
268     if (!sred->psubcomm) {
269       ierr = PetscViewerASCIIPrintf(viewer,"  preconditioner not yet setup\n");CHKERRQ(ierr);
270     } else {
271       MPI_Comm    comm,subcomm;
272       PetscMPIInt comm_size,subcomm_size;
273       DM          dm,subdm;
274 
275       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
276       subdm = private_PCTelescopeGetSubDM(sred);
277       comm = PetscSubcommParent(sred->psubcomm);
278       subcomm = PetscSubcommChild(sred->psubcomm);
279       ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
280       ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);
281 
282       ierr = PetscViewerASCIIPrintf(viewer,"  parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
283       ierr = PetscViewerASCIIPrintf(viewer,"  comm_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
284       switch (sred->subcommtype) {
285         case PETSC_SUBCOMM_INTERLACED :
286           ierr = PetscViewerASCIIPrintf(viewer,"  subcomm type: interlaced\n",sred->subcommtype);CHKERRQ(ierr);
287           break;
288         case PETSC_SUBCOMM_CONTIGUOUS :
289           ierr = PetscViewerASCIIPrintf(viewer,"  subcomm type: contiguous\n",sred->subcommtype);CHKERRQ(ierr);
290           break;
291         default :
292           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
293       }
294       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
295       if (isActiveRank(sred->psubcomm)) {
296         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
297 
298         if (dm && sred->ignore_dm) {
299           ierr = PetscViewerASCIIPrintf(subviewer,"  ignoring DM\n");CHKERRQ(ierr);
300         }
301         if (sred->ignore_kspcomputeoperators) {
302           ierr = PetscViewerASCIIPrintf(subviewer,"  ignoring KSPComputeOperators\n");CHKERRQ(ierr);
303         }
304         switch (sred->sr_type) {
305         case TELESCOPE_DEFAULT:
306           ierr = PetscViewerASCIIPrintf(subviewer,"  using default setup\n");CHKERRQ(ierr);
307           break;
308         case TELESCOPE_DMDA:
309           ierr = PetscViewerASCIIPrintf(subviewer,"  DMDA detected\n");CHKERRQ(ierr);
310           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
311           break;
312         case TELESCOPE_DMPLEX:
313           ierr = PetscViewerASCIIPrintf(subviewer,"  DMPLEX detected\n");CHKERRQ(ierr);
314           break;
315         }
316 
317         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
318         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
319       }
320       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
321     }
322   }
323   PetscFunctionReturn(0);
324 }
325 
/*
 PCSetUp_Telescope - Main setup/update routine.

 First call (pc->setupcalled == PETSC_FALSE): creates the PetscSubcomm and the
 inner KSP (active ranks only, prefix "telescope_"), detects the setup type
 from any attached DM, installs the type-specific function pointers, and
 builds the redundant operator and (near) nullspaces.

 Subsequent calls: only refreshes the redundant operator (MAT_REUSE_MATRIX).
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  MPI_Comm          comm,subcomm=0;
  PCTelescopeType   sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* subcomm definition */
  if (!pc->setupcalled) {
    if (!sred->psubcomm) {
      ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
  }
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* internal KSP: created once, on active ranks only */
  if (!pc->setupcalled) {
    const char *prefix;

    if (isActiveRank(sred->psubcomm)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }
  /* Determine type of setup/update: DMDA-aware, DMPLEX-aware, or default.
     Decided once on first setup and cached in sred->sr_type afterwards. */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }
    }

    /* -pc_telescope_ignore_dm forces the default (DM-free) path */
    if (sred->ignore_dm) {
      ierr = PetscInfo(pc,"PCTelescope: ignore DM\n");CHKERRQ(ierr);
      sr_type = TELESCOPE_DEFAULT;
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
    break;
  default: SETERRQ(comm,PETSC_ERR_SUP,"Only support for repartitioning DMDA is provided");
    break;
  }

  /* setup */
  if (sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update: build Bred (+ nullspaces) on first setup, reuse it afterwards */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (isActiveRank(sred->psubcomm)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled){
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
444 
/*
 PCApply_Telescope - Applies the preconditioner: scatter x to the
 sub-communicator, solve with the inner KSP there, scatter the result back
 into y. Follows the scheme documented at the top of this file.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred; local layouts of xtmp and xred match by
     construction, so a direct array copy is valid (xred is NULL on inactive ranks) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve on the sub-communicator */
  if (isActiveRank(sred->psubcomm)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred into xtmp, then reverse-scatter xtmp -> y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
500 
501 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
502 {
503   PC_Telescope      sred = (PC_Telescope)pc->data;
504   PetscErrorCode    ierr;
505   Vec               xtmp,yred;
506   PetscInt          i,st,ed;
507   VecScatter        scatter;
508   const PetscScalar *x_array;
509   PetscBool         default_init_guess_value;
510 
511   PetscFunctionBegin;
512   xtmp    = sred->xtmp;
513   scatter = sred->scatter;
514   yred    = sred->yred;
515 
516   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
517   *reason = (PCRichardsonConvergedReason)0;
518 
519   if (!zeroguess) {
520     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
521     /* pull in vector y->xtmp */
522     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
523     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
524 
525     /* copy vector entries into xred */
526     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
527     if (yred) {
528       PetscScalar *LA_yred;
529       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
530       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
531       for (i=0; i<ed-st; i++) {
532         LA_yred[i] = x_array[i];
533       }
534       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
535     }
536     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
537   }
538 
539   if (isActiveRank(sred->psubcomm)) {
540     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
541     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
542   }
543 
544   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
545 
546   if (isActiveRank(sred->psubcomm)) {
547     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
548   }
549 
550   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
551   *outits = 1;
552   PetscFunctionReturn(0);
553 }
554 
555 static PetscErrorCode PCReset_Telescope(PC pc)
556 {
557   PC_Telescope   sred = (PC_Telescope)pc->data;
558   PetscErrorCode ierr;
559 
560   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
561   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
562   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
563   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
564   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
565   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
566   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
567   if (sred->pctelescope_reset_type) {
568     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
569   }
570   PetscFunctionReturn(0);
571 }
572 
/*
 PCDestroy_Telescope - Resets the PC, then destroys the inner KSP, the
 PetscSubcomm, the DM context, and the PC_Telescope data itself.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
586 
/*
 PCSetFromOptions_Telescope - Processes the -pc_telescope_* command line
 options: subcomm type, reduction factor, and the ignore-DM /
 ignore-KSPComputeOperators flags.
*/
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  if (flg) {
    /* routed through the setter so the "already set up" guard applies */
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  /* NOTE(review): unlike PCTelescopeSetReductionFactor, this path only checks
     the upper bound (redfactor <= size), not positivity - confirm intended */
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
611 
/* PC implementation-specific APIs */
613 
614 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
615 {
616   PC_Telescope red = (PC_Telescope)pc->data;
617   PetscFunctionBegin;
618   if (ksp) *ksp = red->ksp;
619   PetscFunctionReturn(0);
620 }
621 
622 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
623 {
624   PC_Telescope red = (PC_Telescope)pc->data;
625   PetscFunctionBegin;
626   if (subcommtype) *subcommtype = red->subcommtype;
627   PetscFunctionReturn(0);
628 }
629 
630 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
631 {
632   PC_Telescope     red = (PC_Telescope)pc->data;
633 
634   PetscFunctionBegin;
635   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
636   red->subcommtype = subcommtype;
637   PetscFunctionReturn(0);
638 }
639 
640 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
641 {
642   PC_Telescope red = (PC_Telescope)pc->data;
643   PetscFunctionBegin;
644   if (fact) *fact = red->redfactor;
645   PetscFunctionReturn(0);
646 }
647 
648 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
649 {
650   PC_Telescope     red = (PC_Telescope)pc->data;
651   PetscMPIInt      size;
652   PetscErrorCode   ierr;
653 
654   PetscFunctionBegin;
655   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
656   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
657   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
658   red->redfactor = fact;
659   PetscFunctionReturn(0);
660 }
661 
662 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
663 {
664   PC_Telescope red = (PC_Telescope)pc->data;
665   PetscFunctionBegin;
666   if (v) *v = red->ignore_dm;
667   PetscFunctionReturn(0);
668 }
669 
670 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
671 {
672   PC_Telescope red = (PC_Telescope)pc->data;
673   PetscFunctionBegin;
674   red->ignore_dm = v;
675   PetscFunctionReturn(0);
676 }
677 
678 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
679 {
680   PC_Telescope red = (PC_Telescope)pc->data;
681   PetscFunctionBegin;
682   if (v) *v = red->ignore_kspcomputeoperators;
683   PetscFunctionReturn(0);
684 }
685 
686 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
687 {
688   PC_Telescope red = (PC_Telescope)pc->data;
689   PetscFunctionBegin;
690   red->ignore_kspcomputeoperators = v;
691   PetscFunctionReturn(0);
692 }
693 
694 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
695 {
696   PC_Telescope red = (PC_Telescope)pc->data;
697   PetscFunctionBegin;
698   *dm = private_PCTelescopeGetSubDM(red);
699   PetscFunctionReturn(0);
700 }
701 
702 /*@
703  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
704 
705  Not Collective
706 
707  Input Parameter:
708 .  pc - the preconditioner context
709 
710  Output Parameter:
.  subksp - the KSP defined on the smaller set of processes
712 
713  Level: advanced
714 
.keywords: PC, telescoping solve
716 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
724 
725 /*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.
727 
728  Not Collective
729 
730  Input Parameter:
731 .  pc - the preconditioner context
732 
733  Output Parameter:
734 .  fact - the reduction factor
735 
736  Level: advanced
737 
738 .keywords: PC, telescoping solve
739 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
747 
748 /*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
758 
759  Level: advanced
760 
761 .keywords: PC, telescoping solve
762 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if the PC is not of type telescope */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
770 
771 /*@
772  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
773 
774  Not Collective
775 
776  Input Parameter:
777 .  pc - the preconditioner context
778 
779  Output Parameter:
780 .  v - the flag
781 
782  Level: advanced
783 
784 .keywords: PC, telescoping solve
785 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
793 
794 /*@
795  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
796 
797  Not Collective
798 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
804 
805  Level: advanced
806 
807 .keywords: PC, telescoping solve
808 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if the PC is not of type telescope */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
816 
817 /*@
818  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
819 
820  Not Collective
821 
822  Input Parameter:
823 .  pc - the preconditioner context
824 
825  Output Parameter:
826 .  v - the flag
827 
828  Level: advanced
829 
830 .keywords: PC, telescoping solve
831 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
839 
840 /*@
841  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
842 
843  Not Collective
844 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
850 
851  Level: advanced
852 
853 .keywords: PC, telescoping solve
854 @*/
855 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
856 {
857   PetscErrorCode ierr;
858   PetscFunctionBegin;
859   ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

 Level: advanced

.keywords: PC, telescoping solve

.seealso: PCTelescopeGetKSP(), PCTelescopeGetIgnoreDM(), PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod
     errors if the method is absent (i.e. pc is not a PCTELESCOPE). */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
885 
/*@
 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.keywords: PC, telescoping solve

.seealso: PCTelescopeGetSubcommType(), PetscSubcommType, PetscSubcomm, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod is a no-op if pc is not a PCTELESCOPE, so this is
     safe to call on any PC type. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
908 
/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.keywords: PC, telescoping solve

.seealso: PCTelescopeSetSubcommType(), PetscSubcomm, PetscSubcommType, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod
     errors if the method is absent (i.e. pc is not a PCTELESCOPE). */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
933 
934 /* -------------------------------------------------------------------------------------*/
935 /*MC
936    PCTELESCOPE - Runs a KSP solver on a sub-group of processors. MPI processes not in the sub-communicator are idle during the solve.
937 
   Options Database:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI processes and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm - flag to indicate whether an attached DM should be ignored
-  -pc_telescope_subcomm_type <interlaced,contiguous> - how to define the reduced communicator. see PetscSubcomm for more.
942 
943    Level: advanced
944 
945    Notes:
946    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
947    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
948    This means there will be MPI processes which will be idle during the application of this preconditioner.
949 
950    The default KSP is PREONLY. If a DM is attached to the PC, it is re-partitioned on the sub-communicator.
951    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
952    Currently only support for re-partitioning a DMDA is provided.
953    Any nullspace attached to the original Bmat operator is extracted, re-partitioned and set on the repartitioned Bmat operator.
954    KSPSetComputeOperators() is not propagated to the sub KSP.
955    Currently there is no support for the flag -pc_use_amat
956 
957    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
958    creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner (PC).
959 
960   Developer Notes:
961    During PCSetup, the B operator is scattered onto c'.
962    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
963    Then, KSPSolve() is executed on the c' communicator.
964 
965    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
966    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
967 
968    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
969    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
970    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
971 
   The telescoping preconditioner is aware of an attached DM. In the event that the DM is of type DMDA (2D or 3D -
   support for 1D DMDAs is not provided), a new DMDA is created on c' (i.e. it is re-partitioned), and this new DM
   is attached to the sub KSPSolve(). The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
976 
977    By default, B' is defined by simply fusing rows from different MPI processes
978 
979    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
980    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
981    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
982 
983    Limitations/improvements include the following.
984    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
985 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
990 
991    Mapping of vectors is performed in the following way.
992    Suppose the parent comm size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
993    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
994    We perform the scatter to the sub-comm in the following way.
995    [1] Given a vector x defined on comm c
996 
997    rank(c) : _________ 0 ______  ________ 1 _______  ________ 2 _____________ ___________ 3 __________
998          x : [0, 1, 2, 3, 4, 5] [6, 7, 8, 9, 10, 11] [12, 13, 14, 15, 16, 17] [18, 19, 20, 21, 22, 23]
999 
1000    scatter to xtmp defined also on comm c so that we have the following values
1001 
1002    rank(c) : ___________________ 0 ________________  _1_  ______________________ 2 _______________________  __3_
1003       xtmp : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] [  ] [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23] [  ]
1004 
1005    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1006 
1007 
1008    [2] Copy the value from rank 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1009    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1010 
1011     rank(c') : ___________________ 0 _______________  ______________________ 1 _____________________
1012       xred : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
1013 
1014 
1015   Contributed by Dave May
1016 
1017   Reference:
1018   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1019 
1020 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1021 M*/
/*
   PCCreate_Telescope - Creates and initializes the PCTELESCOPE context.

   Allocates the PC_Telescope data, installs the PC method table, selects the
   default setup/mat-create/nullspace-create implementations, and composes the
   query/set methods used by the PCTelescopeGet*/PCTelescopeSet* wrappers above.
*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the context and set the defaults: interlaced sub-communicator,
     no reduction (factor 1), and both the attached DM and any
     KSPSetComputeOperators callback honoured (not ignored). */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Install the PC method table; no transpose application is provided. */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Default (DM-less) implementations; PCSetUp_Telescope may switch these
     when a supported DM (e.g. DMDA) is attached. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Compose the type-specific methods targeted by the public wrappers. */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1061