1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h>  /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
8 static PetscBool  cited      = PETSC_FALSE;
9 static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
10                                "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
11                                "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
12                                "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
13                                "  series    = {PASC '16},\n"
14                                "  isbn      = {978-1-4503-4126-4},\n"
15                                "  location  = {Lausanne, Switzerland},\n"
16                                "  pages     = {5:1--5:12},\n"
17                                "  articleno = {5},\n"
18                                "  numpages  = {12},\n"
19                                "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
20                                "  doi       = {10.1145/2929908.2929913},\n"
21                                "  acmid     = {2929913},\n"
22                                "  publisher = {ACM},\n"
23                                "  address   = {New York, NY, USA},\n"
24                                "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
25                                "  year      = {2016}\n"
26                                "}\n";
27 
28 /*
29  default setup mode
30 
31  [1a] scatter to (FORWARD)
32  x(comm) -> xtmp(comm)
33  [1b] local copy (to) ranks with color = 0
34  xred(subcomm) <- xtmp
35 
36  [2] solve on sub KSP to obtain yred(subcomm)
37 
38  [3a] local copy (from) ranks with color = 0
39  yred(subcomm) --> xtmp
40  [3b] scatter from (REVERSE)
41  xtmp(comm) -> y(comm)
42 */
43 
44 /*
45   Collective on comm_f
46   Notes
47    * Using comm_f = MPI_COMM_NULL will result in an error.
48    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are MPI_COMM_NULL, the subcomm is not valid.
49    * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
50 */
51 PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid)
52 {
53   PetscInt     valid = 1;
54   MPI_Group    group_f, group_c;
55   PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
56   PetscMPIInt *ranks_f, *ranks_c;
57 
58   PetscFunctionBegin;
59   PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");
60 
61   PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
62   if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));
63 
64   PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
65   if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));
66 
67   /* check not all comm_c's are NULL */
68   size_c_sum = size_c;
69   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
70   if (size_c_sum == 0) valid = 0;
71 
72   /* check we can map at least 1 rank in comm_c to comm_f */
73   PetscCall(PetscMalloc1(size_f, &ranks_f));
74   PetscCall(PetscMalloc1(size_c, &ranks_c));
75   for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
76   for (k = 0; k < size_c; k++) ranks_c[k] = k;
77 
78   /*
79    MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
80    I do not want the code to terminate immediately if this occurs; rather, I want to throw
81    the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
82    that comm_c is not a valid sub-communicator.
83    Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
84   */
85   count = 0;
86   if (comm_c != MPI_COMM_NULL) {
87     (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
88     for (k = 0; k < size_f; k++) {
89       if (ranks_f[k] == MPI_UNDEFINED) count++;
90     }
91   }
92   if (count == size_f) valid = 0;
93 
94   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
95   if (valid == 1) *isvalid = PETSC_TRUE;
96   else *isvalid = PETSC_FALSE;
97 
98   PetscCall(PetscFree(ranks_f));
99   PetscCall(PetscFree(ranks_c));
100   PetscCallMPI(MPI_Group_free(&group_f));
101   if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
102   PetscFunctionReturn(PETSC_SUCCESS);
103 }
104 
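/*
  Example usage: a minimal sketch (not part of the original source) which builds a
  sub-communicator containing the even ranks of PETSC_COMM_WORLD via MPI_Comm_split()
  and tests its validity. Ranks excluded from the split receive MPI_COMM_NULL, which
  PCTelescopeTestValidSubcomm() explicitly permits.

    MPI_Comm    subcomm = MPI_COMM_NULL;
    PetscMPIInt rank;
    PetscBool   isvalid;

    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
    MPI_Comm_split(PETSC_COMM_WORLD, (rank % 2 == 0) ? 0 : MPI_UNDEFINED, rank, &subcomm);
    PCTelescopeTestValidSubcomm(PETSC_COMM_WORLD, subcomm, &isvalid);
*/
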
105 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
106 {
107   DM subdm = NULL;
108 
109   if (!PCTelescope_isActiveRank(sred)) {
110     subdm = NULL;
111   } else {
112     switch (sred->sr_type) {
113     case TELESCOPE_DEFAULT:
114       subdm = NULL;
115       break;
116     case TELESCOPE_DMDA:
117       subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart;
118       break;
119     case TELESCOPE_DMPLEX:
120       subdm = NULL;
121       break;
122     case TELESCOPE_COARSEDM:
123       if (sred->ksp) PetscCallAbort(PETSC_COMM_SELF, KSPGetDM(sred->ksp, &subdm));
124       break;
125     }
126   }
127   return subdm;
128 }
129 
130 PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred)
131 {
132   PetscInt   m, M, bs, st, ed;
133   Vec        x, xred, yred, xtmp;
134   Mat        B;
135   MPI_Comm   comm, subcomm;
136   VecScatter scatter;
137   IS         isin;
138   VecType    vectype;
139 
140   PetscFunctionBegin;
141   PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
142   comm    = PetscSubcommParent(sred->psubcomm);
143   subcomm = PetscSubcommChild(sred->psubcomm);
144 
145   PetscCall(PCGetOperators(pc, NULL, &B));
146   PetscCall(MatGetSize(B, &M, NULL));
147   PetscCall(MatGetBlockSize(B, &bs));
148   PetscCall(MatCreateVecs(B, &x, NULL));
149   PetscCall(MatGetVecType(B, &vectype));
150 
151   xred = NULL;
152   m    = 0;
153   if (PCTelescope_isActiveRank(sred)) {
154     PetscCall(VecCreate(subcomm, &xred));
155     PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
156     PetscCall(VecSetBlockSize(xred, bs));
157     PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
158     PetscCall(VecSetFromOptions(xred));
159     PetscCall(VecGetLocalSize(xred, &m));
160   }
161 
162   yred = NULL;
163   if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));
164 
165   PetscCall(VecCreate(comm, &xtmp));
166   PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
167   PetscCall(VecSetBlockSize(xtmp, bs));
168   PetscCall(VecSetType(xtmp, vectype));
169 
170   if (PCTelescope_isActiveRank(sred)) {
171     PetscCall(VecGetOwnershipRange(xred, &st, &ed));
172     PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
173   } else {
174     PetscCall(VecGetOwnershipRange(x, &st, &ed));
175     PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
176   }
177   PetscCall(ISSetBlockSize(isin, bs));
178 
179   PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
180 
181   sred->isin    = isin;
182   sred->scatter = scatter;
183   sred->xred    = xred;
184   sred->yred    = yred;
185   sred->xtmp    = xtmp;
186   PetscCall(VecDestroy(&x));
187   PetscFunctionReturn(PETSC_SUCCESS);
188 }
189 
190 PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
191 {
192   MPI_Comm comm, subcomm;
193   Mat      Bred, B;
194   PetscInt nr, nc, bs;
195   IS       isrow, iscol;
196   Mat      Blocal, *_Blocal;
197 
198   PetscFunctionBegin;
199   PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
200   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
201   subcomm = PetscSubcommChild(sred->psubcomm);
202   PetscCall(PCGetOperators(pc, NULL, &B));
203   PetscCall(MatGetSize(B, &nr, &nc));
204   isrow = sred->isin;
205   PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
206   PetscCall(ISSetIdentity(iscol));
207   PetscCall(MatGetBlockSizes(B, NULL, &bs));
208   PetscCall(ISSetBlockSize(iscol, bs));
209   PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
210   PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
211   Blocal = *_Blocal;
212   PetscCall(PetscFree(_Blocal));
213   Bred = NULL;
214   if (PCTelescope_isActiveRank(sred)) {
215     PetscInt mm;
216 
217     if (reuse != MAT_INITIAL_MATRIX) Bred = *A;
218 
219     PetscCall(MatGetSize(Blocal, &mm, NULL));
220     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
221   }
222   *A = Bred;
223   PetscCall(ISDestroy(&iscol));
224   PetscCall(MatDestroy(&Blocal));
225   PetscFunctionReturn(PETSC_SUCCESS);
226 }
227 
228 static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
229 {
230   PetscBool  has_const;
231   const Vec *vecs;
232   Vec       *sub_vecs = NULL;
233   PetscInt   i, k, n = 0;
234   MPI_Comm   subcomm;
235 
236   PetscFunctionBegin;
237   subcomm = PetscSubcommChild(sred->psubcomm);
238   PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));
239 
240   if (PCTelescope_isActiveRank(sred)) {
241     if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
242   }
243 
244   /* copy entries */
245   for (k = 0; k < n; k++) {
246     const PetscScalar *x_array;
247     PetscScalar       *LA_sub_vec;
248     PetscInt           st, ed;
249 
250     /* pull in vector x->xtmp */
251     PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
252     PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
253     if (sub_vecs) {
254       /* copy vector entries into xred */
255       PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
256       if (sub_vecs[k]) {
257         PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
258         PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
259         for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
260         PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
261       }
262       PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
263     }
264   }
265 
266   if (PCTelescope_isActiveRank(sred)) {
267     /* create new (near) nullspace for redundant object */
268     PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
269     PetscCall(VecDestroyVecs(n, &sub_vecs));
270     PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
271     PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
272   }
273   PetscFunctionReturn(PETSC_SUCCESS);
274 }
275 
276 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat)
277 {
278   Mat B;
279 
280   PetscFunctionBegin;
281   PetscCall(PCGetOperators(pc, NULL, &B));
282   /* Propagate the nullspace if it exists */
283   {
284     MatNullSpace nullspace, sub_nullspace;
285     PetscCall(MatGetNullSpace(B, &nullspace));
286     if (nullspace) {
287       PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
288       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
289       if (PCTelescope_isActiveRank(sred)) {
290         PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
291         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
292       }
293     }
294   }
295   /* Propagate the near nullspace if it exists */
296   {
297     MatNullSpace nearnullspace, sub_nearnullspace;
298     PetscCall(MatGetNearNullSpace(B, &nearnullspace));
299     if (nearnullspace) {
300       PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
301       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
302       if (PCTelescope_isActiveRank(sred)) {
303         PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
304         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
305       }
306     }
307   }
308   PetscFunctionReturn(PETSC_SUCCESS);
309 }
310 
311 static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer)
312 {
313   PC_Telescope sred = (PC_Telescope)pc->data;
314   PetscBool    iascii, isstring;
315   PetscViewer  subviewer;
316 
317   PetscFunctionBegin;
318   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
319   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
320   if (iascii) {
321     {
322       MPI_Comm    comm, subcomm;
323       PetscMPIInt comm_size, subcomm_size;
324       DM          dm = NULL, subdm = NULL;
325 
326       PetscCall(PCGetDM(pc, &dm));
327       subdm = private_PCTelescopeGetSubDM(sred);
328 
329       if (sred->psubcomm) {
330         comm    = PetscSubcommParent(sred->psubcomm);
331         subcomm = PetscSubcommChild(sred->psubcomm);
332         PetscCallMPI(MPI_Comm_size(comm, &comm_size));
333         PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));
334 
335         PetscCall(PetscViewerASCIIPushTab(viewer));
336         PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
337         PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
338         switch (sred->subcommtype) {
339         case PETSC_SUBCOMM_INTERLACED:
340           PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
341           break;
342         case PETSC_SUBCOMM_CONTIGUOUS:
343           PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
344           break;
345         default:
346           SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
347         }
348         PetscCall(PetscViewerASCIIPopTab(viewer));
349       } else {
350         PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
351         subcomm = sred->subcomm;
352         if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;
353 
354         PetscCall(PetscViewerASCIIPushTab(viewer));
355         PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
356         PetscCall(PetscViewerASCIIPopTab(viewer));
357       }
358 
359       PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
360       if (PCTelescope_isActiveRank(sred)) {
361         PetscCall(PetscViewerASCIIPushTab(subviewer));
362 
363         if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
364         if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
365         switch (sred->sr_type) {
366         case TELESCOPE_DEFAULT:
367           PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n"));
368           break;
369         case TELESCOPE_DMDA:
370           PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
371           PetscCall(DMView_DA_Short(subdm, subviewer));
372           break;
373         case TELESCOPE_DMPLEX:
374           PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n"));
375           break;
376         case TELESCOPE_COARSEDM:
377           PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n"));
378           break;
379         }
380 
381         if (dm) {
382           PetscObject obj = (PetscObject)dm;
383           PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
384           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
385           if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
386           if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
387           if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
388           PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
389           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
390         } else {
391           PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
392         }
393         if (subdm) {
394           PetscObject obj = (PetscObject)subdm;
395           PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
396           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
397           if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
398           if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
399           if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
400           PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
401           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
402         } else {
403           PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
404         }
405 
406         PetscCall(KSPView(sred->ksp, subviewer));
407         PetscCall(PetscViewerASCIIPopTab(subviewer));
408       }
409       PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
410     }
411   }
412   PetscFunctionReturn(PETSC_SUCCESS);
413 }
414 
415 static PetscErrorCode PCSetUp_Telescope(PC pc)
416 {
417   PC_Telescope    sred = (PC_Telescope)pc->data;
418   MPI_Comm        comm, subcomm = MPI_COMM_NULL;
419   PCTelescopeType sr_type;
420 
421   PetscFunctionBegin;
422   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
423 
424   /* Determine type of setup/update */
425   if (!pc->setupcalled) {
426     PetscBool has_dm, same;
427     DM        dm;
428 
429     sr_type = TELESCOPE_DEFAULT;
430     has_dm  = PETSC_FALSE;
431     PetscCall(PCGetDM(pc, &dm));
432     if (dm) has_dm = PETSC_TRUE;
433     if (has_dm) {
434       /* check for dmda */
435       PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
436       if (same) {
437         PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
438         sr_type = TELESCOPE_DMDA;
439       }
440       /* check for dmplex */
441       PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
442       if (same) {
443         PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
444         sr_type = TELESCOPE_DMPLEX;
445       }
446 
447       if (sred->use_coarse_dm) {
448         PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
449         sr_type = TELESCOPE_COARSEDM;
450       }
451 
452       if (sred->ignore_dm) {
453         PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
454         sr_type = TELESCOPE_DEFAULT;
455       }
456     }
457     sred->sr_type = sr_type;
458   } else {
459     sr_type = sred->sr_type;
460   }
461 
462   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
463   switch (sr_type) {
464   case TELESCOPE_DEFAULT:
465     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
466     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
467     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
468     sred->pctelescope_reset_type              = NULL;
469     break;
470   case TELESCOPE_DMDA:
471     pc->ops->apply                            = PCApply_Telescope_dmda;
472     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
473     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
474     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
475     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
476     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
477     break;
478   case TELESCOPE_DMPLEX:
479     SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
480   case TELESCOPE_COARSEDM:
481     pc->ops->apply                            = PCApply_Telescope_CoarseDM;
482     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
483     sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
484     sred->pctelescope_matcreate_type          = NULL;
485     sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
486     sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
487     break;
488   default:
489     SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
490   }
491 
492   /* subcomm definition */
493   if (!pc->setupcalled) {
494     if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
495       if (!sred->psubcomm) {
496         PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
497         PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
498         PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
499         sred->subcomm = PetscSubcommChild(sred->psubcomm);
500       }
501     } else { /* query PC for DM, check communicators */
502       DM          dm, dm_coarse_partition          = NULL;
503       MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
504       PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
505       PetscBool   isvalidsubcomm;
506 
507       PetscCall(PCGetDM(pc, &dm));
508       comm_fine = PetscObjectComm((PetscObject)dm);
509       PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
510       if (dm_coarse_partition) cnt = 1;
511       PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
512       PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");
513 
514       PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
515       if (dm_coarse_partition) {
516         comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
517         PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
518       }
519 
520       cs[0] = csize_fine;
521       cs[1] = csize_coarse_partition;
522       PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
523       PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");
524 
525       PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
526       PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
527       sred->subcomm = comm_coarse_partition;
528     }
529   }
530   subcomm = sred->subcomm;
531 
532   /* internal KSP */
533   if (!pc->setupcalled) {
534     const char *prefix;
535 
536     if (PCTelescope_isActiveRank(sred)) {
537       PetscCall(KSPCreate(subcomm, &sred->ksp));
538       PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
539       PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
540       PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
541       PetscCall(PCGetOptionsPrefix(pc, &prefix));
542       PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
543       PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
544     }
545   }
546 
547   /* setup */
548   if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
549   /* update */
550   if (!pc->setupcalled) {
551     if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
552     if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
553   } else {
554     if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
555   }
556 
557   /* common - no construction */
558   if (PCTelescope_isActiveRank(sred)) {
559     PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
560     if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
561   }
562   PetscFunctionReturn(PETSC_SUCCESS);
563 }
564 
565 static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y)
566 {
567   PC_Telescope       sred = (PC_Telescope)pc->data;
568   Vec                xtmp, xred, yred;
569   PetscInt           i, st, ed;
570   VecScatter         scatter;
571   PetscScalar       *array;
572   const PetscScalar *x_array;
573 
574   PetscFunctionBegin;
575   PetscCall(PetscCitationsRegister(citation, &cited));
576 
577   xtmp    = sred->xtmp;
578   scatter = sred->scatter;
579   xred    = sred->xred;
580   yred    = sred->yred;
581 
582   /* pull in vector x->xtmp */
583   PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
584   PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
585 
586   /* copy vector entries into xred */
587   PetscCall(VecGetArrayRead(xtmp, &x_array));
588   if (xred) {
589     PetscScalar *LA_xred;
590     PetscCall(VecGetOwnershipRange(xred, &st, &ed));
591     PetscCall(VecGetArray(xred, &LA_xred));
592     for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
593     PetscCall(VecRestoreArray(xred, &LA_xred));
594   }
595   PetscCall(VecRestoreArrayRead(xtmp, &x_array));
596   /* solve */
597   if (PCTelescope_isActiveRank(sred)) {
598     PetscCall(KSPSolve(sred->ksp, xred, yred));
599     PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
600   }
601   /* return vector */
602   PetscCall(VecGetArray(xtmp, &array));
603   if (yred) {
604     const PetscScalar *LA_yred;
605     PetscCall(VecGetOwnershipRange(yred, &st, &ed));
606     PetscCall(VecGetArrayRead(yred, &LA_yred));
607     for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
608     PetscCall(VecRestoreArrayRead(yred, &LA_yred));
609   }
610   PetscCall(VecRestoreArray(xtmp, &array));
611   PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
612   PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
613   PetscFunctionReturn(PETSC_SUCCESS);
614 }
615 
616 static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
617 {
618   PC_Telescope       sred = (PC_Telescope)pc->data;
619   Vec                xtmp, yred;
620   PetscInt           i, st, ed;
621   VecScatter         scatter;
622   const PetscScalar *x_array;
623   PetscBool          default_init_guess_value;
624 
625   PetscFunctionBegin;
626   xtmp    = sred->xtmp;
627   scatter = sred->scatter;
628   yred    = sred->yred;
629 
630   PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
631   *reason = (PCRichardsonConvergedReason)0;
632 
633   if (!zeroguess) {
634     PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
635     /* pull in vector y->xtmp */
636     PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
637     PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
638 
639     /* copy vector entries into xred */
640     PetscCall(VecGetArrayRead(xtmp, &x_array));
641     if (yred) {
642       PetscScalar *LA_yred;
643       PetscCall(VecGetOwnershipRange(yred, &st, &ed));
644       PetscCall(VecGetArray(yred, &LA_yred));
645       for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
646       PetscCall(VecRestoreArray(yred, &LA_yred));
647     }
648     PetscCall(VecRestoreArrayRead(xtmp, &x_array));
649   }
650 
651   if (PCTelescope_isActiveRank(sred)) {
652     PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
653     if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
654   }
655 
656   PetscCall(PCApply_Telescope(pc, x, y));
657 
658   if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));
659 
660   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
661   *outits = 1;
662   PetscFunctionReturn(PETSC_SUCCESS);
663 }
664 
665 static PetscErrorCode PCReset_Telescope(PC pc)
666 {
667   PC_Telescope sred = (PC_Telescope)pc->data;
668 
669   PetscFunctionBegin;
670   PetscCall(ISDestroy(&sred->isin));
671   PetscCall(VecScatterDestroy(&sred->scatter));
672   PetscCall(VecDestroy(&sred->xred));
673   PetscCall(VecDestroy(&sred->yred));
674   PetscCall(VecDestroy(&sred->xtmp));
675   PetscCall(MatDestroy(&sred->Bred));
676   PetscCall(KSPReset(sred->ksp));
677   if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
678   PetscFunctionReturn(PETSC_SUCCESS);
679 }
680 
681 static PetscErrorCode PCDestroy_Telescope(PC pc)
682 {
683   PC_Telescope sred = (PC_Telescope)pc->data;
684 
685   PetscFunctionBegin;
686   PetscCall(PCReset_Telescope(pc));
687   PetscCall(KSPDestroy(&sred->ksp));
688   PetscCall(PetscSubcommDestroy(&sred->psubcomm));
689   PetscCall(PetscFree(sred->dm_ctx));
690   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
691   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
692   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
693   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
694   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
695   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
696   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
697   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
698   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
699   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
700   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
701   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
702   PetscCall(PetscFree(pc->data));
703   PetscFunctionReturn(PETSC_SUCCESS);
704 }
705 
706 static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject)
707 {
708   PC_Telescope     sred = (PC_Telescope)pc->data;
709   MPI_Comm         comm;
710   PetscMPIInt      size;
711   PetscBool        flg;
712   PetscSubcommType subcommtype;
713 
714   PetscFunctionBegin;
715   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
716   PetscCallMPI(MPI_Comm_size(comm, &size));
717   PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
718   PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
719   if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
720   PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
721   PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
722   PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
723   PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
724   PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
725   PetscOptionsHeadEnd();
726   PetscFunctionReturn(PETSC_SUCCESS);
727 }
728 
729 /* PC implementation-specific APIs */
730 
731 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp)
732 {
733   PC_Telescope red = (PC_Telescope)pc->data;
734   PetscFunctionBegin;
735   if (ksp) *ksp = red->ksp;
736   PetscFunctionReturn(PETSC_SUCCESS);
737 }
738 
739 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype)
740 {
741   PC_Telescope red = (PC_Telescope)pc->data;
742   PetscFunctionBegin;
743   if (subcommtype) *subcommtype = red->subcommtype;
744   PetscFunctionReturn(PETSC_SUCCESS);
745 }
746 
747 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype)
748 {
749   PC_Telescope red = (PC_Telescope)pc->data;
750 
751   PetscFunctionBegin;
752   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
753   red->subcommtype = subcommtype;
754   PetscFunctionReturn(PETSC_SUCCESS);
755 }
756 
757 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact)
758 {
759   PC_Telescope red = (PC_Telescope)pc->data;
760   PetscFunctionBegin;
761   if (fact) *fact = red->redfactor;
762   PetscFunctionReturn(PETSC_SUCCESS);
763 }
764 
765 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact)
766 {
767   PC_Telescope red = (PC_Telescope)pc->data;
768   PetscMPIInt  size;
769 
770   PetscFunctionBegin;
771   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
772   PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
773   PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
774   red->redfactor = fact;
775   PetscFunctionReturn(PETSC_SUCCESS);
776 }
777 
778 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v)
779 {
780   PC_Telescope red = (PC_Telescope)pc->data;
781   PetscFunctionBegin;
782   if (v) *v = red->ignore_dm;
783   PetscFunctionReturn(PETSC_SUCCESS);
784 }
785 
786 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v)
787 {
788   PC_Telescope red = (PC_Telescope)pc->data;
789   PetscFunctionBegin;
790   red->ignore_dm = v;
791   PetscFunctionReturn(PETSC_SUCCESS);
792 }
793 
794 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v)
795 {
796   PC_Telescope red = (PC_Telescope)pc->data;
797   PetscFunctionBegin;
798   if (v) *v = red->use_coarse_dm;
799   PetscFunctionReturn(PETSC_SUCCESS);
800 }
801 
802 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v)
803 {
804   PC_Telescope red = (PC_Telescope)pc->data;
805   PetscFunctionBegin;
806   red->use_coarse_dm = v;
807   PetscFunctionReturn(PETSC_SUCCESS);
808 }
809 
810 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v)
811 {
812   PC_Telescope red = (PC_Telescope)pc->data;
813   PetscFunctionBegin;
814   if (v) *v = red->ignore_kspcomputeoperators;
815   PetscFunctionReturn(PETSC_SUCCESS);
816 }
817 
818 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v)
819 {
820   PC_Telescope red = (PC_Telescope)pc->data;
821   PetscFunctionBegin;
822   red->ignore_kspcomputeoperators = v;
823   PetscFunctionReturn(PETSC_SUCCESS);
824 }
825 
826 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm)
827 {
828   PC_Telescope red = (PC_Telescope)pc->data;
829   PetscFunctionBegin;
830   *dm = private_PCTelescopeGetSubDM(red);
831   PetscFunctionReturn(PETSC_SUCCESS);
832 }
833 
834 /*@
835   PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.
836 
837   Not Collective
838 
839   Input Parameter:
840 . pc - the preconditioner context
841 
842   Output Parameter:
843 . subksp - the `KSP` defined on the smaller set of processes
844 
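  Note:
  The sub-`KSP` is created during `PCSetUp()`; on ranks which are not members of the
  sub-communicator the returned `KSP` is NULL. A minimal usage sketch (assuming pc is
  a `PCTELESCOPE` which has been set up):
.vb
   KSP subksp;

   PCTelescopeGetKSP(pc, &subksp);
   if (subksp) KSPSetType(subksp, KSPGMRES);
.ve
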
845   Level: advanced
846 
847 .seealso: `PCTELESCOPE`
848 @*/
849 PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp)
850 {
851   PetscFunctionBegin;
852   PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
853   PetscFunctionReturn(PETSC_SUCCESS);
854 }
855 
856 /*@
857   PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI ranks has been reduced.
858 
859   Not Collective
860 
861   Input Parameter:
862 . pc - the preconditioner context
863 
864   Output Parameter:
865 . fact - the reduction factor
866 
867   Level: advanced
868 
869 .seealso: `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
870 @*/
871 PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact)
872 {
873   PetscFunctionBegin;
874   PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
875   PetscFunctionReturn(PETSC_SUCCESS);
876 }
877 
878 /*@
879   PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI ranks will be reduced.
880 
881   Not Collective
882 
883   Input Parameters:
884 + pc   - the preconditioner context
885 - fact - the reduction factor
888 
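  Example Usage:
  A minimal sketch; with 64 MPI ranks a reduction factor of 4 yields a sub-communicator
  with 64/4 = 16 ranks. The factor is consumed when the sub-communicator is created during
  the first `PCSetUp()`, so it should be set beforehand:
.vb
   PCSetType(pc, PCTELESCOPE);
   PCTelescopeSetReductionFactor(pc, 4);
.ve
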
889   Level: advanced
890 
891 .seealso: `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
892 @*/
893 PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact)
894 {
895   PetscFunctionBegin;
896   PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
897   PetscFunctionReturn(PETSC_SUCCESS);
898 }
899 
900 /*@
901   PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be ignored.
902 
903   Not Collective
904 
905   Input Parameter:
906 . pc - the preconditioner context
907 
908   Output Parameter:
909 . v - the flag
910 
911   Level: advanced
912 
913 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
914 @*/
915 PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v)
916 {
917   PetscFunctionBegin;
918   PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
919   PetscFunctionReturn(PETSC_SUCCESS);
920 }
921 
922 /*@
923   PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC`.
924 
925   Not Collective
926 
927   Input Parameters:
928 + pc - the preconditioner context
929 - v  - Use `PETSC_TRUE` to ignore any `DM`
932 
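  Example Usage:
  A minimal sketch; the setup type is determined during the first `PCSetUp()`, so set the
  flag beforehand to force the default (operator-only) setup even when a `DM` is attached:
.vb
   PCSetType(pc, PCTELESCOPE);
   PCTelescopeSetIgnoreDM(pc, PETSC_TRUE);
.ve
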
933   Level: advanced
934 
935 .seealso: `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
936 @*/
937 PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v)
938 {
939   PetscFunctionBegin;
940   PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
941   PetscFunctionReturn(PETSC_SUCCESS);
942 }
943 
944 /*@
945   PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to the `DM` associated with the `PC` will be used.
946 
947   Not Collective
948 
949   Input Parameter:
950 . pc - the preconditioner context
951 
952   Output Parameter:
953 . v - the flag
954 
955   Level: advanced
956 
957 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
958 @*/
959 PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v)
960 {
961   PetscFunctionBegin;
962   PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
963   PetscFunctionReturn(PETSC_SUCCESS);
964 }
965 
966 /*@
967   PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM`.
968 
969   Not Collective
970   Input Parameters:
971 + pc - the preconditioner context
972 - v  - Use `PETSC_FALSE` to ignore any coarse `DM`
975 . v - Use `PETSC_FALSE` to ignore any coarse `DM`
976 
977   Notes:
978   When you have specified to use a coarse `DM`, the communicator used to create the sub-KSP within `PCTELESCOPE`
979   will be that of the coarse `DM`. Hence the flags -pc_telescope_reduction_factor and
980   -pc_telescope_subcomm_type will no longer have any meaning.
981   It is required that the communicators associated with the parent (fine) `DM` and the coarse `DM` are of different sizes.
982   An error will occur if the size of the communicator associated with the coarse `DM`
983   is the same as that of the parent `DM`.
984   Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
985   This will be checked at the time the preconditioner is set up and an error will occur if
986   the coarse DM does not define a sub-communicator of that used by the parent DM.
987 
988   The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
989   the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).
990 
991   Support is currently only provided for the case when you are using `KSPSetComputeOperators()`.
992 
993   The user is required to compose a function with the parent DM to facilitate the transfer of fields (`Vec`) between the different decompositions defined by the fine and coarse `DM`s.
994   In the user code, this is achieved via
995 .vb
996    {
997      DM dm_fine;
998      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
999    }
1000 .ve
1001   The signature of the user provided field scatter method is
1002 .vb
1003    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1004 .ve
1005   The user must provide support for both mode = `SCATTER_FORWARD` and mode = `SCATTER_REVERSE`.
1006   `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
1007 
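  A skeleton of such a method (a sketch only; the body of each transfer is entirely
  problem specific) is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse)
   {
     PetscFunctionBegin;
     if (mode == SCATTER_FORWARD) {
       ... transfer entries of x_fine (defined on dm_fine) into x_coarse (defined on dm_coarse) ...
     } else if (mode == SCATTER_REVERSE) {
       ... transfer entries of x_coarse (defined on dm_coarse) back into x_fine ...
     }
     PetscFunctionReturn(PETSC_SUCCESS);
   }
.ve
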
1008   Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1009   of state variables between the fine and coarse `DM`s.
1010   In the context of a finite element discretization, an example state variable might be
1011   values associated with quadrature points within each element.
1012   A user provided state scatter method is composed via
1013 .vb
1014    {
1015      DM dm_fine;
1016      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1017    }
1018 .ve
1019   The signature of the user provided state scatter method is
1020 .vb
1021    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1022 .ve
1023   `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
1024   The user is only required to support mode = `SCATTER_FORWARD`.
1025   No assumption is made about the data type of the state variables.
1026   These must be managed by the user and must be accessible from the `DM`.
1027 
1028   Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
1029   associated with the sub-`KSP` residing within `PCTELESCOPE`.
1030   In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
1031   `KSPSetComputeOperators()` should be "similar" in type or origin.
1032   Specifically the following rules are used to infer what context on the sub-`KSP` should be.
1033   Specifically, the following rules are used to infer what the context for the sub-`KSP` should be.
1034   First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
1035   Note that the special case of a `DMSHELL` context is queried.
1036 
1037 .vb
1038    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1039    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1040    DMShellGetContext(dm_fine,&dmfine_shellctx);
1041 
1042    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1043    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1044 .ve
1045 
1046   The following rules are then enforced:
1047 
1048   1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1049   `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,NULL);
1050 
1051   2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1052 
1053   check that dmcoarse_appctx is also non-NULL. If this is true, then:
1054   `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1055 
1056   3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1057 
1058   check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1059   `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1060 
1061   If none of the above three tests pass, then `PCTELESCOPE` cannot safely determine what
1062   context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
1063   In this case, an additional mechanism is provided via a composed function which will return
1064   the actual context to be used. To use this feature you must compose the "getter" function
1065   with the coarse `DM`, e.g.
1066 .vb
1067    {
1068      DM dm_coarse;
1069      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1070    }
1071 .ve
1072   The signature of the user provided method is
1073 .vb
1074    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1075 .ve
1076 
1077   Level: advanced
1078 
1079 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
1080 @*/
1081 PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v)
1082 {
1083   PetscFunctionBegin;
1084   PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
1085   PetscFunctionReturn(PETSC_SUCCESS);
1086 }
1087 
1088 /*@
1089   PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be ignored.
1090 
1091   Not Collective
1092 
1093   Input Parameter:
1094 . pc - the preconditioner context
1095 
1096   Output Parameter:
1097 . v - the flag
1098 
1099   Level: advanced
1100 
1101 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
1102 @*/
1103 PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v)
1104 {
1105   PetscFunctionBegin;
1106   PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
1107   PetscFunctionReturn(PETSC_SUCCESS);
1108 }
1109 
1110 /*@
1111   PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore `KSPComputeOperators()`.
1112 
1113   Not Collective
1114 
1115   Input Parameters:
1116 + pc - the preconditioner context
1117 - v  - Use `PETSC_TRUE` to ignore the method (if defined) set via `KSPSetComputeOperators()` on pc
1120 
1121   Level: advanced
1122 
1123 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1124 @*/
1125 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v)
1126 {
1127   PetscFunctionBegin;
1128   PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
1129   PetscFunctionReturn(PETSC_SUCCESS);
1130 }
1131 
1132 /*@
1133   PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.
1134 
1135   Not Collective
1136 
1137   Input Parameter:
1138 . pc - the preconditioner context
1139 
1140   Output Parameter:
1141 . subdm - The re-partitioned DM
1142 
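  Note:
  A re-partitioned `DM` only exists for the `DMDA` and coarse-`DM` setup types; in all other
  cases, and on ranks which are not members of the sub-communicator, NULL is returned.
  A minimal sketch (assuming pc is a `PCTELESCOPE` which has been set up):
.vb
   DM subdm;

   PCTelescopeGetDM(pc, &subdm);
   if (subdm) DMView(subdm, PETSC_VIEWER_STDOUT_SELF);
.ve
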
1143   Level: advanced
1144 
1145 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1146 @*/
1147 PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm)
1148 {
1149   PetscFunctionBegin;
1150   PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
1151   PetscFunctionReturn(PETSC_SUCCESS);
1152 }
1153 
1154 /*@
1155   PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1156 
1157   Logically Collective
1158 
1159   Input Parameters:
1160 + pc          - the preconditioner context
1161 - subcommtype - the subcommunicator type (see `PetscSubcommType`)
1162 
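  Note:
  The type cannot be changed once the preconditioner has been set up (an error is raised in
  that case). A minimal sketch:
.vb
   PCSetType(pc, PCTELESCOPE);
   PCTelescopeSetSubcommType(pc, PETSC_SUBCOMM_CONTIGUOUS);
.ve
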
1163   Level: advanced
1164 
1165 .seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
1166 @*/
1167 PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
1168 {
1169   PetscFunctionBegin;
1170   PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
1171   PetscFunctionReturn(PETSC_SUCCESS);
1172 }
1173 
1174 /*@
1175   PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1176 
1177   Not Collective
1178 
1179   Input Parameter:
1180 . pc - the preconditioner context
1181 
1182   Output Parameter:
1183 . subcommtype - the subcommunicator type (see `PetscSubcommType`)
1184 
1185   Level: advanced
1186 
1187 .seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
1188 @*/
1189 PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
1190 {
1191   PetscFunctionBegin;
1192   PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
1193   PetscFunctionReturn(PETSC_SUCCESS);
1194 }
1195 
1196 /*MC
1197    PCTELESCOPE - Runs a `KSP` solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1198 
1199    Options Database Keys:
1200 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1201 .  -pc_telescope_ignore_dm - flag to indicate whether an attached `DM` should be ignored.
1202 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. See `PetscSubcomm` for more information.
1203 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-KSP.
1204 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.
1205 
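   Example Usage:
   A sketch of a command line (the executable name ./app is hypothetical) which runs the outer
   Krylov method on 64 ranks while the preconditioner is applied on 64/4 = 16 ranks. Note that
   options for the inner solver take the prefix -telescope_:
.vb
   mpiexec -n 64 ./app -ksp_type cg -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_pc_type lu
.ve
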
1206    Level: advanced
1207 
1208    Notes:
1209    Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
1210    creates a child sub-communicator (c') containing fewer MPI ranks than c.
1211    The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
1212    sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
1213    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1214    Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM`.
1215 
1216    The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.
1217 
1218    There are three setup mechanisms for `PCTELESCOPE`. Features supported by each type are described below.
1219    In the following, we will refer to the operators B and B', these are the Bmat provided to the `KSP` on the
1220    communicators c and c' respectively.
1221 
1222    [1] Default setup
1223    The sub-communicator c' is created via `PetscSubcommCreate()`.
1224    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
1225    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1226    No support is provided for `KSPSetComputeOperators()`.
1227    Currently there is no support for the flag -pc_use_amat.
1228 
1229    [2] `DM` aware setup
1230    If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
1231    c' is created via `PetscSubcommCreate()`.
1232    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
1233    Currently only support for re-partitioning a `DMDA` is provided.
1234    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
1235    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1236    Support is provided for `KSPSetComputeOperators()`. The user-provided function and context are propagated to the sub `KSP`.
1237    This is fragile since the user must ensure that their user context is valid for use on c'.
1238    Currently there is no support for the flag -pc_use_amat.
1239 
1240    [3] Coarse `DM` setup
1241    If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
1242    `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
1243    The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
1244    `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1245    The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user-defined) communicator hierarchy, such as would be
1246    available when using multigrid on unstructured meshes.
1247    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1248    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
1249    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1250    There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
1251    The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1252    Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
1253    Currently there is no support for the flag -pc_use_amat.
1254    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
1255    Further information about the user-provided methods required by this setup type is given in `PCTelescopeSetUseCoarseDM()`.
1256 
1257    Developer Notes:
1258    During `PCSetUp()`, the B operator is scattered onto c'.
1259    Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1260    Then, `KSPSolve()` is executed on the c' communicator.
1261 
1262    The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
1263    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub `KSP` on only the ranks within the communicator which have a color equal to zero.
1264 
1265    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1266    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1267    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1268 
1269    The telescoping preconditioner can re-partition an attached DM if it is a `DMDA` (2D or 3D -
1270    support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
1271    and this new `DM` is attached the sub `KSP`. The design of telescope is such that it should be possible to extend support
1272    for re-partitioning other `DM` types (e.g. `DMPLEX`). The user can supply a flag to ignore attached `DM`s.
1273    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1274 
1275    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1276 
1277    When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1278    into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
1279    locally (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`
1280 
1281    Limitations/improvements include the following.
1282    `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
1283    A unified mechanism to query for user contexts, as required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()`, is also needed.
1284 
1285    The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
1286    and performing P^T.A.P. It might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
1287    `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1288    consistent manner.
1289 
1290    Mapping of vectors (default setup mode) is performed in the following way.
1291    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1292    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1293    We perform the scatter to the sub-communicator in the following way.
1294    [1] Given a vector x defined on communicator c
1295 
1296 .vb
1297    rank(c)  local values of x
1298    ------- ----------------------------------------
1299         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1300         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1301         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1302         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1303 .ve
1304 
1305    scatter into xtmp defined also on comm c, so that we have the following values
1306 
1307 .vb
1308    rank(c)  local values of xtmp
1309    ------- ----------------------------------------------------------------------------
1310         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1311         1   [ ]
1312         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1313         3   [ ]
1314 .ve
1315 
1316    The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.
1317 
1318    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1319    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1320 
1321 .vb
1322    rank(c')  local values of xred
1323    -------- ----------------------------------------------------------------------------
1324          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1325          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1326 .ve
1327 
1328   Contributed by Dave May
1329 
1330   Reference:
1331   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1332 
1333 .seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1334 M*/
1335 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1336 {
1337   struct _PC_Telescope *sred;
1338 
1339   PetscFunctionBegin;
1340   PetscCall(PetscNew(&sred));
1341   sred->psubcomm                   = NULL;
1342   sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
1343   sred->subcomm                    = MPI_COMM_NULL;
1344   sred->redfactor                  = 1;
1345   sred->ignore_dm                  = PETSC_FALSE;
1346   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1347   sred->use_coarse_dm              = PETSC_FALSE;
1348   pc->data                         = (void *)sred;
1349 
1350   pc->ops->apply           = PCApply_Telescope;
1351   pc->ops->applytranspose  = NULL;
1352   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1353   pc->ops->setup           = PCSetUp_Telescope;
1354   pc->ops->destroy         = PCDestroy_Telescope;
1355   pc->ops->reset           = PCReset_Telescope;
1356   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1357   pc->ops->view            = PCView_Telescope;
1358 
1359   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1360   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1361   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1362   sred->pctelescope_reset_type              = NULL;
1363 
1364   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
1365   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
1366   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
1367   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
1368   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
1369   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
1370   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
1371   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
1372   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
1373   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
1374   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
1375   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
1376   PetscFunctionReturn(PETSC_SUCCESS);
1377 }
1378