xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 2bf68e3e0f2a61f71e7c65bee250bfa1c8ce0cdb)
1 
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
/* Citation for the PCTelescope algorithm; registered once (guarded by 'cited')
   via PetscCitationsRegister() at the top of PCApply_Telescope(). */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {http://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
28 
29 /*
30  PCTelescopeSetUp_default()
31  PCTelescopeMatCreate_default()
32 
33  default
34 
35  // scatter in
36  x(comm) -> xtmp(comm)
37 
38  xred(subcomm) <- xtmp
39  yred(subcomm)
40 
41  yred(subcomm) --> xtmp
42 
43  // scatter out
44  xtmp(comm) -> y(comm)
45 */
46 
47 PetscBool isActiveRank(PetscSubcomm scomm)
48 {
49   if (scomm->color == 0) { return PETSC_TRUE; }
50   else { return PETSC_FALSE; }
51 }
52 
53 #undef __FUNCT__
54 #define __FUNCT__ "private_PCTelescopeGetSubDM"
55 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
56 {
57   DM subdm = NULL;
58 
59   if (!isActiveRank(sred->psubcomm)) { subdm = NULL; }
60   else {
61     switch (sred->sr_type) {
62     case TELESCOPE_DEFAULT: subdm = NULL;
63       break;
64     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
65       break;
66     case TELESCOPE_DMPLEX:  subdm = NULL;
67       break;
68     }
69   }
70   return(subdm);
71 }
72 
#undef __FUNCT__
#define __FUNCT__ "PCTelescopeSetUp_default"
/*
 PCTelescopeSetUp_default - Builds the vectors and scatter used to move data between
 the parent communicator and the sub-communicator.

 Creates and stashes into sred:
   xred,yred - work vectors on the sub-communicator (NULL on inactive ranks)
   xtmp      - parent-comm intermediate whose local sizes mirror xred's layout
   isin      - stride IS of the rows this rank owns in the redundant layout
   scatter   - moves x(comm) into xtmp(comm)
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* prototype vector x on the parent comm matching the operator's row layout */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: redundant rhs/solution vector, created only on active ranks;
     m stays 0 on inactive ranks so xtmp gets no local entries there */
  xred = NULL;
  m    = 0;
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: parent-comm staging vector; same type as x, local size m */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* isin: the global row indices this rank owns in the redundant layout
     (empty stride on inactive ranks) */
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  /* scatter pulling the selected entries of x into xtmp */
  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
134 
#undef __FUNCT__
#define __FUNCT__ "PCTelescopeMatCreate_default"
/*
 PCTelescopeMatCreate_default - Creates (reuse = MAT_INITIAL_MATRIX) or refreshes the
 redundant operator *A on the sub-communicator: each rank extracts its rows (sred->isin,
 all columns) of B as a sequential matrix, and the active ranks concatenate those pieces
 on the sub-communicator. On inactive ranks *A is set to NULL.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  isrow = sred->isin;
  /* take all columns, so each extracted row is complete */
  ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr);
  /* MatGetSubMatrices is collective on comm; the row IS is empty on inactive ranks */
  ierr = MatGetSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    PetscInt mm;

    /* feed the existing matrix back in when updating in place */
    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
171 
172 #undef __FUNCT__
173 #define __FUNCT__ "PCTelescopeSubNullSpaceCreate_Telescope"
174 static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
175 {
176   PetscErrorCode   ierr;
177   PetscBool        has_const;
178   const Vec        *vecs;
179   Vec              *sub_vecs = NULL;
180   PetscInt         i,k,n = 0;
181   MPI_Comm         subcomm;
182 
183   PetscFunctionBegin;
184   subcomm = PetscSubcommChild(sred->psubcomm);
185   ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);
186 
187   if (isActiveRank(sred->psubcomm)) {
188     if (n) {
189       ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
190     }
191   }
192 
193   /* copy entries */
194   for (k=0; k<n; k++) {
195     const PetscScalar *x_array;
196     PetscScalar       *LA_sub_vec;
197     PetscInt          st,ed;
198 
199     /* pull in vector x->xtmp */
200     ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
201     ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
202     if (sub_vecs) {
203       /* copy vector entries into xred */
204       ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
205       if (sub_vecs[k]) {
206         ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
207         ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
208         for (i=0; i<ed-st; i++) {
209           LA_sub_vec[i] = x_array[i];
210         }
211         ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
212       }
213       ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
214     }
215   }
216 
217   if (isActiveRank(sred->psubcomm)) {
218     /* create new (near) nullspace for redundant object */
219     ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
220     ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
221     if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
222     if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
223   }
224   PetscFunctionReturn(0);
225 }
226 
#undef __FUNCT__
#define __FUNCT__ "PCTelescopeMatNullSpaceCreate_default"
/*
 PCTelescopeMatNullSpaceCreate_default - If the operator B carries a nullspace and/or
 near nullspace, clones each onto the sub-communicator and attaches it to sub_mat
 (active ranks only).
*/
static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
{
  PetscErrorCode   ierr;
  Mat              B;

  PetscFunctionBegin;
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);

  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace,sub_nullspace;
    ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
    if (nullspace) {
      ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
      ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
      /* sub_nullspace is only created on active ranks */
      if (isActiveRank(sred->psubcomm)) {
        ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
        ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
      }
    }
  }

  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
    if (nearnullspace) {
      ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
      ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
      if (isActiveRank(sred->psubcomm)) {
        ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
        ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
266 
267 #undef __FUNCT__
268 #define __FUNCT__ "PCView_Telescope"
269 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
270 {
271   PC_Telescope     sred = (PC_Telescope)pc->data;
272   PetscErrorCode   ierr;
273   PetscBool        iascii,isstring;
274   PetscViewer      subviewer;
275 
276   PetscFunctionBegin;
277   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
278   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
279   if (iascii) {
280     if (!sred->psubcomm) {
281       ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: preconditioner not yet setup\n");CHKERRQ(ierr);
282     } else {
283       MPI_Comm    comm,subcomm;
284       PetscMPIInt comm_size,subcomm_size;
285       DM          dm,subdm;
286 
287       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
288       subdm = private_PCTelescopeGetSubDM(sred);
289       comm = PetscSubcommParent(sred->psubcomm);
290       subcomm = PetscSubcommChild(sred->psubcomm);
291       ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
292       ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);
293 
294       ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
295       ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: comm_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
296       switch (sred->subcommtype) {
297         case PETSC_SUBCOMM_INTERLACED :
298           ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: subcomm type: interlaced\n",sred->subcommtype);CHKERRQ(ierr);
299           break;
300         case PETSC_SUBCOMM_CONTIGUOUS :
301           ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: subcomm type: contiguous\n",sred->subcommtype);CHKERRQ(ierr);
302           break;
303         default :
304           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
305       }
306       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
307       if (isActiveRank(sred->psubcomm)) {
308         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
309 
310         if (dm && sred->ignore_dm) {
311           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: ignoring DM\n");CHKERRQ(ierr);
312         }
313         if (sred->ignore_kspcomputeoperators) {
314           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: ignoring KSPComputeOperators\n");CHKERRQ(ierr);
315         }
316         switch (sred->sr_type) {
317         case TELESCOPE_DEFAULT:
318           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: using default setup\n");CHKERRQ(ierr);
319           break;
320         case TELESCOPE_DMDA:
321           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: DMDA detected\n");CHKERRQ(ierr);
322           ierr = DMView_DMDAShort(subdm,subviewer);CHKERRQ(ierr);
323           break;
324         case TELESCOPE_DMPLEX:
325           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: DMPLEX detected\n");CHKERRQ(ierr);
326           break;
327         }
328 
329         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
330         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
331       }
332       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
333     }
334   }
335   PetscFunctionReturn(0);
336 }
337 
338 #undef __FUNCT__
339 #define __FUNCT__ "PCSetUp_Telescope"
340 static PetscErrorCode PCSetUp_Telescope(PC pc)
341 {
342   PC_Telescope      sred = (PC_Telescope)pc->data;
343   PetscErrorCode    ierr;
344   MPI_Comm          comm,subcomm=0;
345   PCTelescopeType   sr_type;
346 
347   PetscFunctionBegin;
348   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
349 
350   /* subcomm definition */
351   if (!pc->setupcalled) {
352     if (!sred->psubcomm) {
353       ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
354       ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
355       ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
356       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
357     }
358   }
359   subcomm = PetscSubcommChild(sred->psubcomm);
360 
361   /* internal KSP */
362   if (!pc->setupcalled) {
363     const char *prefix;
364 
365     if (isActiveRank(sred->psubcomm)) {
366       ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
367       ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
368       ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
369       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
370       ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
371       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
372       ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
373       ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
374     }
375   }
376   /* Determine type of setup/update */
377   if (!pc->setupcalled) {
378     PetscBool has_dm,same;
379     DM        dm;
380 
381     sr_type = TELESCOPE_DEFAULT;
382     has_dm = PETSC_FALSE;
383     ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
384     if (dm) { has_dm = PETSC_TRUE; }
385     if (has_dm) {
386       /* check for dmda */
387       ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
388       if (same) {
389         ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
390         sr_type = TELESCOPE_DMDA;
391       }
392       /* check for dmplex */
393       ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
394       if (same) {
395         PetscInfo(pc,"PCTelescope: found DMPLEX\n");
396         sr_type = TELESCOPE_DMPLEX;
397       }
398     }
399 
400     if (sred->ignore_dm) {
401       PetscInfo(pc,"PCTelescope: ignore DM\n");
402       sr_type = TELESCOPE_DEFAULT;
403     }
404     sred->sr_type = sr_type;
405   } else {
406     sr_type = sred->sr_type;
407   }
408 
409   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
410   switch (sr_type) {
411   case TELESCOPE_DEFAULT:
412     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
413     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
414     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
415     sred->pctelescope_reset_type              = NULL;
416     break;
417   case TELESCOPE_DMDA:
418     pc->ops->apply                            = PCApply_Telescope_dmda;
419     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
420     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
421     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
422     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
423     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
424     break;
425   case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
426     break;
427   default: SETERRQ(comm,PETSC_ERR_SUP,"Only support for repartitioning DMDA is provided");
428     break;
429   }
430 
431   /* setup */
432   if (sred->pctelescope_setup_type) {
433     ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
434   }
435   /* update */
436   if (!pc->setupcalled) {
437     if (sred->pctelescope_matcreate_type) {
438       ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
439     }
440     if (sred->pctelescope_matnullspacecreate_type) {
441       ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
442     }
443   } else {
444     if (sred->pctelescope_matcreate_type) {
445       ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
446     }
447   }
448 
449   /* common - no construction */
450   if (isActiveRank(sred->psubcomm)) {
451     ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
452     if (pc->setfromoptionscalled && !pc->setupcalled){
453       ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
454     }
455   }
456   PetscFunctionReturn(0);
457 }
458 
#undef __FUNCT__
#define __FUNCT__ "PCApply_Telescope"
/*
 PCApply_Telescope - Applies the preconditioner: scatters x into the redundant
 layout, solves with the sub-KSP on the active ranks, and scatters the result
 back into y. Collective on the parent communicator.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;     /* NULL on inactive ranks */
  yred    = sred->yred;     /* NULL on inactive ranks */

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred; xtmp's local block holds exactly the
     entries this rank owns in xred's layout (see PCTelescopeSetUp_default) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (isActiveRank(sred->psubcomm)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
515 
516 #undef __FUNCT__
517 #define __FUNCT__ "PCApplyRichardson_Telescope"
518 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
519 {
520   PC_Telescope      sred = (PC_Telescope)pc->data;
521   PetscErrorCode    ierr;
522   Vec               xtmp,yred;
523   PetscInt          i,st,ed;
524   VecScatter        scatter;
525   const PetscScalar *x_array;
526   PetscBool         default_init_guess_value;
527 
528   PetscFunctionBegin;
529   xtmp    = sred->xtmp;
530   scatter = sred->scatter;
531   yred    = sred->yred;
532 
533   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
534   *reason = (PCRichardsonConvergedReason)0;
535 
536   if (!zeroguess) {
537     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
538     /* pull in vector y->xtmp */
539     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
540     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
541 
542     /* copy vector entries into xred */
543     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
544     if (yred) {
545       PetscScalar *LA_yred;
546       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
547       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
548       for (i=0; i<ed-st; i++) {
549         LA_yred[i] = x_array[i];
550       }
551       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
552     }
553     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
554   }
555 
556   if (isActiveRank(sred->psubcomm)) {
557     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
558     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
559   }
560 
561   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
562 
563   if (isActiveRank(sred->psubcomm)) {
564     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
565   }
566 
567   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
568   *outits = 1;
569   PetscFunctionReturn(0);
570 }
571 
572 #undef __FUNCT__
573 #define __FUNCT__ "PCReset_Telescope"
574 static PetscErrorCode PCReset_Telescope(PC pc)
575 {
576   PC_Telescope   sred = (PC_Telescope)pc->data;
577   PetscErrorCode ierr;
578 
579   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
580   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
581   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
582   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
583   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
584   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
585   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
586   if (sred->pctelescope_reset_type) {
587     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
588   }
589   PetscFunctionReturn(0);
590 }
591 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Telescope"
/*
 PCDestroy_Telescope - Fully destroys the PC: resets all work objects, then
 frees the sub-KSP, the sub-communicator, the DM context and the data struct.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
607 
608 #undef __FUNCT__
609 #define __FUNCT__ "PCSetFromOptions_Telescope"
610 static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
611 {
612   PC_Telescope     sred = (PC_Telescope)pc->data;
613   PetscErrorCode   ierr;
614   MPI_Comm         comm;
615   PetscMPIInt      size;
616   PetscBool        flg;
617   PetscSubcommType subcommtype;
618 
619   PetscFunctionBegin;
620   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
621   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
622   ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
623   ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
624   if (flg) {
625     ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
626   }
627   ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr);
628   if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
629   ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr);
630   ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr);
631   ierr = PetscOptionsTail();CHKERRQ(ierr);
632   PetscFunctionReturn(0);
633 }
634 
/* PC implementation specific API's */
636 
637 #undef __FUNCT__
638 #define __FUNCT__ "PCTelescopeGetKSP_Telescope"
639 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
640 {
641   PC_Telescope red = (PC_Telescope)pc->data;
642   PetscFunctionBegin;
643   if (ksp) *ksp = red->ksp;
644   PetscFunctionReturn(0);
645 }
646 
647 #undef __FUNCT__
648 #define __FUNCT__ "PCTelescopeGetSubcommType_Telescope"
649 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
650 {
651   PC_Telescope red = (PC_Telescope)pc->data;
652   PetscFunctionBegin;
653   if (subcommtype) *subcommtype = red->subcommtype;
654   PetscFunctionReturn(0);
655 }
656 
657 #undef __FUNCT__
658 #define __FUNCT__ "PCTelescopeSetSubcommType_Telescope"
659 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
660 {
661   PC_Telescope     red = (PC_Telescope)pc->data;
662 
663   PetscFunctionBegin;
664   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
665   red->subcommtype = subcommtype;
666   PetscFunctionReturn(0);
667 }
668 
669 #undef __FUNCT__
670 #define __FUNCT__ "PCTelescopeGetReductionFactor_Telescope"
671 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
672 {
673   PC_Telescope red = (PC_Telescope)pc->data;
674   PetscFunctionBegin;
675   if (fact) *fact = red->redfactor;
676   PetscFunctionReturn(0);
677 }
678 
679 #undef __FUNCT__
680 #define __FUNCT__ "PCTelescopeSetReductionFactor_Telescope"
681 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
682 {
683   PC_Telescope     red = (PC_Telescope)pc->data;
684   PetscMPIInt      size;
685   PetscErrorCode   ierr;
686 
687   PetscFunctionBegin;
688   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
689   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
690   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
691   red->redfactor = fact;
692   PetscFunctionReturn(0);
693 }
694 
695 #undef __FUNCT__
696 #define __FUNCT__ "PCTelescopeGetIgnoreDM_Telescope"
697 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
698 {
699   PC_Telescope red = (PC_Telescope)pc->data;
700   PetscFunctionBegin;
701   if (v) *v = red->ignore_dm;
702   PetscFunctionReturn(0);
703 }
704 
705 #undef __FUNCT__
706 #define __FUNCT__ "PCTelescopeSetIgnoreDM_Telescope"
707 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
708 {
709   PC_Telescope red = (PC_Telescope)pc->data;
710   PetscFunctionBegin;
711   red->ignore_dm = v;
712   PetscFunctionReturn(0);
713 }
714 
715 #undef __FUNCT__
716 #define __FUNCT__ "PCTelescopeGetIgnoreKSPComputeOperators_Telescope"
717 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
718 {
719   PC_Telescope red = (PC_Telescope)pc->data;
720   PetscFunctionBegin;
721   if (v) *v = red->ignore_kspcomputeoperators;
722   PetscFunctionReturn(0);
723 }
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "PCTelescopeSetIgnoreKSPComputeOperators_Telescope"
727 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
728 {
729   PC_Telescope red = (PC_Telescope)pc->data;
730   PetscFunctionBegin;
731   red->ignore_kspcomputeoperators = v;
732   PetscFunctionReturn(0);
733 }
734 
735 #undef __FUNCT__
736 #define __FUNCT__ "PCTelescopeGetDM_Telescope"
737 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
738 {
739   PC_Telescope red = (PC_Telescope)pc->data;
740   PetscFunctionBegin;
741   *dm = private_PCTelescopeGetSubDM(red);
742   PetscFunctionReturn(0);
743 }
744 
#undef __FUNCT__
#define __FUNCT__ "PCTelescopeGetKSP"
/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
769 
#undef __FUNCT__
#define __FUNCT__ "PCTelescopeGetReductionFactor"
/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
794 
#undef __FUNCT__
#define __FUNCT__ "PCTelescopeSetReductionFactor"
/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
819 
820 #undef __FUNCT__
821 #define __FUNCT__ "PCTelescopeGetIgnoreDM"
822 /*@
823  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
824 
825  Not Collective
826 
827  Input Parameter:
828 .  pc - the preconditioner context
829 
830  Output Parameter:
831 .  v - the flag
832 
833  Level: advanced
834 
835 .keywords: PC, telescoping solve
836 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Query dispatched to the type-specific implementation; PetscUseMethod()
     errors if the method is not composed on pc. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
844 
845 #undef __FUNCT__
846 #define __FUNCT__ "PCTelescopeSetIgnoreDM"
847 /*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
857 
858  Level: advanced
859 
860 .keywords: PC, telescoping solve
861 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod() silently succeeds if the method is not composed on pc. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
869 
870 #undef __FUNCT__
871 #define __FUNCT__ "PCTelescopeGetIgnoreKSPComputeOperators"
872 /*@
873  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
874 
875  Not Collective
876 
877  Input Parameter:
878 .  pc - the preconditioner context
879 
880  Output Parameter:
881 .  v - the flag
882 
883  Level: advanced
884 
885 .keywords: PC, telescoping solve
886 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Query dispatched to the type-specific implementation; PetscUseMethod()
     errors if the method is not composed on pc. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
894 
895 #undef __FUNCT__
896 #define __FUNCT__ "PCTelescopeSetIgnoreKSPComputeOperators"
897 /*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
907 
908  Level: advanced
909 
910 .keywords: PC, telescoping solve
911 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod() silently succeeds if the method is not composed on pc. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
919 
920 #undef __FUNCT__
921 #define __FUNCT__ "PCTelescopeGetDM"
922 /*@
923  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
924 
925  Not Collective
926 
927  Input Parameter:
928 .  pc - the preconditioner context
929 
930  Output Parameter:
931 .  subdm - The re-partitioned DM
932 
933  Level: advanced
934 
935 .keywords: PC, telescoping solve
936 @*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Query dispatched to the type-specific implementation; PetscUseMethod()
     errors if the method is not composed on pc. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "PCTelescopeSetSubcommType"
947 /*@
 PCTelescopeSetSubcommType - Sets the subcommunicator type (interlaced or contiguous)
949 
950  Logically Collective
951 
952  Input Parameter:
953 +  pc - the preconditioner context
954 -  subcommtype - the subcommunicator type (see PetscSubcommType)
955 
956  Level: advanced
957 
958 .keywords: PC, telescoping solve
959 
960 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
961 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod() silently succeeds if the method is not composed on pc. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
969 
970 #undef __FUNCT__
971 #define __FUNCT__ "PCTelescopeGetSubcommType"
972 /*@
973  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
974 
975  Not Collective
976 
977  Input Parameter:
978 .  pc - the preconditioner context
979 
980  Output Parameter:
981 .  subcommtype - the subcommunicator type (see PetscSubcommType)
982 
983  Level: advanced
984 
985 .keywords: PC, telescoping solve
986 
987 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
988 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Query dispatched to the type-specific implementation; PetscUseMethod()
     errors if the method is not composed on pc. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
996 
997 /* -------------------------------------------------------------------------------------*/
998 /*MC
999    PCTELESCOPE - Runs a KSP solver on a sub-group of processors. MPI processes not in the sub-communicator are idle during the solve.
1000 
   Options Database:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI processes and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored
-  -pc_telescope_subcomm_type <interlaced,contiguous> - how to define the reduced communicator. see PetscSubcomm for more.
1005 
1006    Level: advanced
1007 
1008    Notes:
1009    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1010    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1011    This means there will be MPI processes which will be idle during the application of this preconditioner.
1012 
1013    The default KSP is PREONLY. If a DM is attached to the PC, it is re-partitioned on the sub-communicator.
1014    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1015    Currently only support for re-partitioning a DMDA is provided.
1016    Any nullspace attached to the original Bmat operator is extracted, re-partitioned and set on the repartitioned Bmat operator.
1017    KSPSetComputeOperators() is not propagated to the sub KSP.
1018    Currently there is no support for the flag -pc_use_amat
1019 
1020    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1021    creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner (PC).
1022 
1023   Developer Notes:
1024    During PCSetup, the B operator is scattered onto c'.
1025    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1026    Then, KSPSolve() is executed on the c' communicator.
1027 
1028    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1029    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1030 
1031    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1032    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1033    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1034 
   The telescoping preconditioner is aware of an attached DM. In the event that the DM is of type DMDA (2D or 3D -
   support for 1D DMDAs is not provided), a new DMDA is created on c' (e.g. it is re-partitioned), and this new DM
   is attached to the sub KSPSolve(). The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1039 
1040    By default, B' is defined by simply fusing rows from different MPI processes
1041 
1042    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1043    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatGetSubMatrices(), (iii) fusing the
1044    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1045 
1046    Limitations/improvements include the following.
1047    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1048 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
1053 
1054    Mapping of vectors is performed in the following way.
1055    Suppose the parent comm size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1056    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1057    We perform the scatter to the sub-comm in the following way.
1058    [1] Given a vector x defined on comm c
1059 
1060    rank(c) : _________ 0 ______  ________ 1 _______  ________ 2 _____________ ___________ 3 __________
1061          x : [0, 1, 2, 3, 4, 5] [6, 7, 8, 9, 10, 11] [12, 13, 14, 15, 16, 17] [18, 19, 20, 21, 22, 23]
1062 
1063    scatter to xtmp defined also on comm c so that we have the following values
1064 
1065    rank(c) : ___________________ 0 ________________  _1_  ______________________ 2 _______________________  __3_
1066       xtmp : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] [  ] [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23] [  ]
1067 
1068    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1069 
1070 
1071    [2] Copy the value from rank 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1072    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1073 
1074     rank(c') : ___________________ 0 _______________  ______________________ 1 _____________________
1075       xred : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
1076 
1077 
1078   Contributed by Dave May
1079 
1080   Reference:
1081   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1082 
1083 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1084 M*/
1085 #undef __FUNCT__
1086 #define __FUNCT__ "PCCreate_Telescope"
1087 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1088 {
1089   PetscErrorCode       ierr;
1090   struct _PC_Telescope *sred;
1091 
1092   PetscFunctionBegin;
1093   ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
1094   sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
1095   sred->redfactor      = 1;
1096   sred->ignore_dm      = PETSC_FALSE;
1097   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1098   pc->data             = (void*)sred;
1099 
1100   pc->ops->apply           = PCApply_Telescope;
1101   pc->ops->applytranspose  = NULL;
1102   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1103   pc->ops->setup           = PCSetUp_Telescope;
1104   pc->ops->destroy         = PCDestroy_Telescope;
1105   pc->ops->reset           = PCReset_Telescope;
1106   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1107   pc->ops->view            = PCView_Telescope;
1108 
1109   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1110   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1111   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1112   sred->pctelescope_reset_type              = NULL;
1113 
1114   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
1115   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
1116   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
1117   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
1118   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
1119   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
1120   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
1121   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
1122   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
1123   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
1124   PetscFunctionReturn(0);
1125 }
1126