xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision e0f629dd96229b85b8d4fbf4a5ed4f197d232daa)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h>  /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
8 static PetscBool  cited      = PETSC_FALSE;
9 static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
10                                "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
11                                "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
12                                "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
13                                "  series    = {PASC '16},\n"
14                                "  isbn      = {978-1-4503-4126-4},\n"
15                                "  location  = {Lausanne, Switzerland},\n"
16                                "  pages     = {5:1--5:12},\n"
17                                "  articleno = {5},\n"
18                                "  numpages  = {12},\n"
19                                "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
20                                "  doi       = {10.1145/2929908.2929913},\n"
21                                "  acmid     = {2929913},\n"
22                                "  publisher = {ACM},\n"
23                                "  address   = {New York, NY, USA},\n"
24                                "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
25                                "  year      = {2016}\n"
26                                "}\n";
27 
28 /*
29  default setup mode
30 
31  [1a] scatter to (FORWARD)
32  x(comm) -> xtmp(comm)
33  [1b] local copy (to) ranks with color = 0
34  xred(subcomm) <- xtmp
35 
36  [2] solve on sub KSP to obtain yred(subcomm)
37 
38  [3a] local copy (from) ranks with color = 0
39  yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
41  xtmp(comm) -> y(comm)
42 */
43 
44 /*
45   Collective[comm_f]
46   Notes
47    * Using comm_f = MPI_COMM_NULL will result in an error
48    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
49    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
50 */
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid) {
  PetscInt     valid = 1; /* local vote; reduced with MPI_MIN so any rank voting 0 invalidates the subcomm */
  MPI_Group    group_f, group_c;
  PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
  PetscMPIInt *ranks_f, *ranks_c;

  PetscFunctionBegin;
  PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");

  /* group_c is created (and freed at the end) only on ranks where comm_c is non-NULL */
  PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));

  PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));

  /* check not all comm_c's are NULL (size_c stays 0 on ranks with a NULL comm_c) */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f, &ranks_f));
  PetscCall(PetscMalloc1(size_c, &ranks_c));
  /* pre-fill with MPI_UNDEFINED so untranslated slots are detectable below */
  for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k = 0; k < size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
    /* count the slots that remained untranslated */
    for (k = 0; k < size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) count++;
    }
  }
  /* every slot untranslated => no rank of comm_c maps into comm_f on this process */
  if (count == size_f) valid = 0;

  /* collective verdict over comm_f: valid only if valid everywhere */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
  PetscFunctionReturn(0);
}
103 
104 DM private_PCTelescopeGetSubDM(PC_Telescope sred) {
105   DM subdm = NULL;
106 
107   if (!PCTelescope_isActiveRank(sred)) {
108     subdm = NULL;
109   } else {
110     switch (sred->sr_type) {
111     case TELESCOPE_DEFAULT: subdm = NULL; break;
112     case TELESCOPE_DMDA: subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart; break;
113     case TELESCOPE_DMPLEX: subdm = NULL; break;
114     case TELESCOPE_COARSEDM:
115       if (sred->ksp) KSPGetDM(sred->ksp, &subdm);
116       break;
117     }
118   }
119   return (subdm);
120 }
121 
/* Default setup: build the sub-communicator vectors (xred/yred), the
   parent-communicator work vector (xtmp) whose layout matches xred on active
   ranks, and the scatter mapping the parent vector x into xtmp. */
PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred) {
  PetscInt   m, M, bs, st, ed;
  Vec        x, xred, yred, xtmp;
  Mat        B;
  MPI_Comm   comm, subcomm;
  VecScatter scatter;
  IS         isin;
  VecType    vectype;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* x is a template vector compatible with the preconditioner matrix B */
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &M, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetVecType(B, &vectype));

  /* xred lives on the sub-communicator; it exists only on active ranks.
     m stays 0 on inactive ranks so xtmp gets zero local entries there. */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecCreate(subcomm, &xred));
    PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
    PetscCall(VecSetBlockSize(xred, bs));
    PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
    PetscCall(VecSetFromOptions(xred));
    PetscCall(VecGetLocalSize(xred, &m));
  }

  /* yred receives the solution of the redundant solve */
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));

  /* xtmp is on the parent comm, but its local layout mirrors xred */
  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, vectype));

  /* index set selecting, on each rank, the global entries of x that must land
     in the local part of xtmp; empty stride on inactive ranks */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));

  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));

  /* stash everything on the PC context; destroyed in PCReset_Telescope() */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(0);
}
180 
/* Default operator creation/update: extract each rank's rows of B (per
   sred->isin) as sequential matrices, then concatenate them into the
   redundant operator Bred on the sub-communicator. *A is NULL on inactive
   ranks. */
PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A) {
  MPI_Comm comm, subcomm;
  Mat      Bred, B;
  PetscInt nr, nc, bs;
  IS       isrow, iscol;
  Mat      Blocal, *_Blocal;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &nr, &nc));
  /* rows: the per-rank slice computed during setup; columns: all of them */
  isrow = sred->isin;
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
  PetscCall(ISSetIdentity(iscol));
  PetscCall(MatGetBlockSizes(B, NULL, &bs));
  PetscCall(ISSetBlockSize(iscol, bs));
  /* hint that a single IS is used collectively, enabling a faster extraction */
  PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
  PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
  Blocal = *_Blocal;
  PetscCall(PetscFree(_Blocal));
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    /* on reuse, concatenate into the existing redundant operator in place */
    if (reuse != MAT_INITIAL_MATRIX) Bred = *A;

    PetscCall(MatGetSize(Blocal, &mm, NULL));
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
  }
  *A = Bred;
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&Blocal));
  PetscFunctionReturn(0);
}
217 
/* Transfer a (near) nullspace of the parent operator onto the sub-communicator:
   scatter each basis vector through xtmp and copy it into a duplicate of xred.
   Collective over the parent comm; *sub_nullspace is created on active ranks. */
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace) {
  PetscBool  has_const;
  const Vec *vecs;
  Vec       *sub_vecs = NULL;
  PetscInt   i, k, n = 0;
  MPI_Comm   subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries */
  /* the scatter is collective, so this loop must execute on every rank,
     including inactive ones (where sub_vecs stays NULL) */
  for (k = 0; k < n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt           st, ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      /* the local layouts of xtmp and sub_vecs[k] match by construction (see PCTelescopeSetUp_default) */
      PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
        PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
        for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
        PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
    /* custom remove callbacks cannot be re-targeted to the subcomm, so refuse them */
    PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
264 
265 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat) {
266   Mat B;
267 
268   PetscFunctionBegin;
269   PetscCall(PCGetOperators(pc, NULL, &B));
270   /* Propagate the nullspace if it exists */
271   {
272     MatNullSpace nullspace, sub_nullspace;
273     PetscCall(MatGetNullSpace(B, &nullspace));
274     if (nullspace) {
275       PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
276       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
277       if (PCTelescope_isActiveRank(sred)) {
278         PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
279         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
280       }
281     }
282   }
283   /* Propagate the near nullspace if it exists */
284   {
285     MatNullSpace nearnullspace, sub_nearnullspace;
286     PetscCall(MatGetNearNullSpace(B, &nearnullspace));
287     if (nearnullspace) {
288       PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
289       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
290       if (PCTelescope_isActiveRank(sred)) {
291         PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
292         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
293       }
294     }
295   }
296   PetscFunctionReturn(0);
297 }
298 
299 static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer) {
300   PC_Telescope sred = (PC_Telescope)pc->data;
301   PetscBool    iascii, isstring;
302   PetscViewer  subviewer;
303 
304   PetscFunctionBegin;
305   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
306   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
307   if (iascii) {
308     {
309       MPI_Comm    comm, subcomm;
310       PetscMPIInt comm_size, subcomm_size;
311       DM          dm = NULL, subdm = NULL;
312 
313       PetscCall(PCGetDM(pc, &dm));
314       subdm = private_PCTelescopeGetSubDM(sred);
315 
316       if (sred->psubcomm) {
317         comm    = PetscSubcommParent(sred->psubcomm);
318         subcomm = PetscSubcommChild(sred->psubcomm);
319         PetscCallMPI(MPI_Comm_size(comm, &comm_size));
320         PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));
321 
322         PetscCall(PetscViewerASCIIPushTab(viewer));
323         PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
324         PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
325         switch (sred->subcommtype) {
326         case PETSC_SUBCOMM_INTERLACED: PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype])); break;
327         case PETSC_SUBCOMM_CONTIGUOUS: PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm type = %s\n", PetscSubcommTypes[sred->subcommtype])); break;
328         default: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
329         }
330         PetscCall(PetscViewerASCIIPopTab(viewer));
331       } else {
332         PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
333         subcomm = sred->subcomm;
334         if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;
335 
336         PetscCall(PetscViewerASCIIPushTab(viewer));
337         PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
338         PetscCall(PetscViewerASCIIPopTab(viewer));
339       }
340 
341       PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
342       if (PCTelescope_isActiveRank(sred)) {
343         PetscCall(PetscViewerASCIIPushTab(subviewer));
344 
345         if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
346         if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
347         switch (sred->sr_type) {
348         case TELESCOPE_DEFAULT: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n")); break;
349         case TELESCOPE_DMDA:
350           PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
351           PetscCall(DMView_DA_Short(subdm, subviewer));
352           break;
353         case TELESCOPE_DMPLEX: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n")); break;
354         case TELESCOPE_COARSEDM: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n")); break;
355         }
356 
357         if (dm) {
358           PetscObject obj = (PetscObject)dm;
359           PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
360           PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE);
361           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name); }
362           if (obj->name) { PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name); }
363           if (obj->prefix) PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix);
364           PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
365           PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE);
366         } else {
367           PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
368         }
369         if (subdm) {
370           PetscObject obj = (PetscObject)subdm;
371           PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
372           PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE);
373           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name); }
374           if (obj->name) { PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name); }
375           if (obj->prefix) PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix);
376           PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
377           PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE);
378         } else {
379           PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
380         }
381 
382         PetscCall(KSPView(sred->ksp, subviewer));
383         PetscCall(PetscViewerASCIIPopTab(subviewer));
384       }
385       PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
386     }
387   }
388   PetscFunctionReturn(0);
389 }
390 
/* Setup for PCTelescope: determine the setup type (default/DMDA/coarse DM),
   bind the type-specific function pointers, define the sub-communicator,
   create the inner KSP on active ranks, and build/update the redundant
   operator. Collective over the PC's communicator. */
static PetscErrorCode PCSetUp_Telescope(PC pc) {
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm, subcomm = 0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* Determine type of setup/update */
  /* The type is decided once on first setup and cached in sred->sr_type. */
  if (!pc->setupcalled) {
    PetscBool has_dm, same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PetscCall(PCGetDM(pc, &dm));
    if (dm) has_dm = PETSC_TRUE;
    if (has_dm) {
      /* check for dmda */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
        sr_type = TELESCOPE_DMPLEX;
      }

      /* user options override the DM-type detection above */
      if (sred->use_coarse_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
        sr_type = TELESCOPE_COARSEDM;
      }

      /* ignore_dm is checked last so it always wins, forcing the default path */
      if (sred->ignore_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default: SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* create the subcomm internally from the reduction factor and type */
      if (!sred->psubcomm) {
        PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
        PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
        PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
        PetscCall(PetscLogObjectMemory((PetscObject)pc, sizeof(PetscSubcomm)));
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm, dm_coarse_partition          = NULL;
      MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
      PetscBool   isvalidsubcomm;

      PetscCall(PCGetDM(pc, &dm));
      comm_fine = PetscObjectComm((PetscObject)dm);
      PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
      /* at least one rank must carry a coarse DM */
      if (dm_coarse_partition) cnt = 1;
      PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
      PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");

      PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
      }

      /* the coarse comm must be strictly smaller than the fine comm */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
      PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
      PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  /* created once, only on ranks belonging to the sub-communicator */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      PetscCall(KSPCreate(subcomm, &sred->ksp));
      PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
      PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)sred->ksp));
      PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
  /* update */
  /* first call creates Bred; subsequent calls reuse its structure */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
    if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
  } else {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
    if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
  }
  PetscFunctionReturn(0);
}
539 
/* Apply the preconditioner: scatter x into xtmp, copy into the subcomm vector
   xred, solve with the inner KSP on active ranks, then reverse the path to
   produce y. Follows the steps described in the "default setup mode" comment
   at the top of this file. Collective over the PC's communicator. */
static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y) {
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, xred, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));

  /* copy vector entries into xred */
  /* xred is NULL on inactive ranks; local layouts of xtmp and xred match by construction */
  PetscCall(VecGetArrayRead(xtmp, &x_array));
  if (xred) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(VecGetArray(xred, &LA_xred));
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    PetscCall(VecRestoreArray(xred, &LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp, xred, yred));
    PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
  }
  /* return vector */
  /* copy the sub-solution yred back into xtmp, then reverse-scatter into y */
  PetscCall(VecGetArray(xtmp, &array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred, &st, &ed));
    PetscCall(VecGetArrayRead(yred, &LA_yred));
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    PetscCall(VecRestoreArrayRead(yred, &LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp, &array));
  PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(0);
}
589 
/* One Richardson iteration: when the initial guess is non-zero, push y onto the
   subcomm vector yred and temporarily flag the inner KSP as having a non-zero
   initial guess, then delegate to PCApply_Telescope(). Only max_it = 1 is
   supported. Collective over the PC's communicator. */
static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) {
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  const PetscScalar *x_array;
  PetscBool          default_init_guess_value; /* inner KSP's flag, saved and restored around the solve */

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));

    /* copy vector entries into xred */
    /* yred is NULL on inactive ranks; local layouts match by construction */
    PetscCall(VecGetArrayRead(xtmp, &x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred, &st, &ed));
      PetscCall(VecGetArray(yred, &LA_yred));
      for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
      PetscCall(VecRestoreArray(yred, &LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  }

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc, x, y));

  /* restore the inner KSP's original initial-guess setting */
  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}
637 
/* Reset: free all objects built during setup (the IS, scatter, work vectors,
   redundant operator), reset the inner KSP, then invoke the type-specific
   reset (DMDA/coarse-DM contexts) last. */
static PetscErrorCode PCReset_Telescope(PC pc) {
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(ISDestroy(&sred->isin));
  PetscCall(VecScatterDestroy(&sred->scatter));
  PetscCall(VecDestroy(&sred->xred));
  PetscCall(VecDestroy(&sred->yred));
  PetscCall(VecDestroy(&sred->xtmp));
  PetscCall(MatDestroy(&sred->Bred));
  PetscCall(KSPReset(sred->ksp));
  /* type-specific cleanup, if the setup type installed one */
  if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
  PetscFunctionReturn(0);
}
652 
/* Destroy: reset first, then free the inner KSP, the subcomm, the DM context,
   clear every composed method, and finally release the PC data. */
static PetscErrorCode PCDestroy_Telescope(PC pc) {
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Telescope(pc));
  PetscCall(KSPDestroy(&sred->ksp));
  PetscCall(PetscSubcommDestroy(&sred->psubcomm));
  PetscCall(PetscFree(sred->dm_ctx));
  /* remove the composed functions registered at creation time */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}
676 
/* Process -pc_telescope_* command line options. Each option writes through to
   the PC_Telescope context (or the corresponding setter for the subcomm type). */
static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
  PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
  /* route through the setter so the "already set up" guard applies */
  if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
  PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
  /* a reduction factor larger than the comm size cannot define a subcomm */
  PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
698 
/* PC implementation-specific APIs */
700 
701 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp) {
702   PC_Telescope red = (PC_Telescope)pc->data;
703   PetscFunctionBegin;
704   if (ksp) *ksp = red->ksp;
705   PetscFunctionReturn(0);
706 }
707 
708 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype) {
709   PC_Telescope red = (PC_Telescope)pc->data;
710   PetscFunctionBegin;
711   if (subcommtype) *subcommtype = red->subcommtype;
712   PetscFunctionReturn(0);
713 }
714 
715 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype) {
716   PC_Telescope red = (PC_Telescope)pc->data;
717 
718   PetscFunctionBegin;
719   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
720   red->subcommtype = subcommtype;
721   PetscFunctionReturn(0);
722 }
723 
724 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact) {
725   PC_Telescope red = (PC_Telescope)pc->data;
726   PetscFunctionBegin;
727   if (fact) *fact = red->redfactor;
728   PetscFunctionReturn(0);
729 }
730 
731 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact) {
732   PC_Telescope red = (PC_Telescope)pc->data;
733   PetscMPIInt  size;
734 
735   PetscFunctionBegin;
736   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
737   PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
738   PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
739   red->redfactor = fact;
740   PetscFunctionReturn(0);
741 }
742 
743 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v) {
744   PC_Telescope red = (PC_Telescope)pc->data;
745   PetscFunctionBegin;
746   if (v) *v = red->ignore_dm;
747   PetscFunctionReturn(0);
748 }
749 
750 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v) {
751   PC_Telescope red = (PC_Telescope)pc->data;
752   PetscFunctionBegin;
753   red->ignore_dm = v;
754   PetscFunctionReturn(0);
755 }
756 
757 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v) {
758   PC_Telescope red = (PC_Telescope)pc->data;
759   PetscFunctionBegin;
760   if (v) *v = red->use_coarse_dm;
761   PetscFunctionReturn(0);
762 }
763 
764 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v) {
765   PC_Telescope red = (PC_Telescope)pc->data;
766   PetscFunctionBegin;
767   red->use_coarse_dm = v;
768   PetscFunctionReturn(0);
769 }
770 
771 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v) {
772   PC_Telescope red = (PC_Telescope)pc->data;
773   PetscFunctionBegin;
774   if (v) *v = red->ignore_kspcomputeoperators;
775   PetscFunctionReturn(0);
776 }
777 
778 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v) {
779   PC_Telescope red = (PC_Telescope)pc->data;
780   PetscFunctionBegin;
781   red->ignore_kspcomputeoperators = v;
782   PetscFunctionReturn(0);
783 }
784 
785 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm) {
786   PC_Telescope red = (PC_Telescope)pc->data;
787   PetscFunctionBegin;
788   *dm = private_PCTelescopeGetSubDM(red);
789   PetscFunctionReturn(0);
790 }
791 
792 /*@
793  PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.
794 
795  Not Collective
796 
797  Input Parameter:
798 .  pc - the preconditioner context
799 
800  Output Parameter:
801 .  subksp - the `KSP` defined the smaller set of processes
802 
803  Level: advanced
804 
805 .seealso: `PCTELESCOPE`
806 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetKSP_C" */
  PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
  PetscFunctionReturn(0);
}
812 
813 /*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI ranks has been reduced.
815 
816  Not Collective
817 
818  Input Parameter:
819 .  pc - the preconditioner context
820 
821  Output Parameter:
822 .  fact - the reduction factor
823 
824  Level: advanced
825 
826 .seealso: `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
827 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetReductionFactor_C" */
  PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
  PetscFunctionReturn(0);
}
833 
834 /*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI ranks will be reduced.
836 
837  Not Collective
838 
 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
844 
845  Level: advanced
846 
847 .seealso: `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
848 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeSetReductionFactor_C" */
  PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
  PetscFunctionReturn(0);
}
854 
855 /*@
856  PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be used.
857 
858  Not Collective
859 
860  Input Parameter:
861 .  pc - the preconditioner context
862 
863  Output Parameter:
864 .  v - the flag
865 
866  Level: advanced
867 
868 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
869 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetIgnoreDM_C" */
  PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(0);
}
875 
876 /*@
877  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
878 
879  Not Collective
880 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
886 
887  Level: advanced
888 
889 .seealso: `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
890 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeSetIgnoreDM_C" */
  PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(0);
}
896 
897 /*@
898  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to `DM` associated with the `PC` will be used.
899 
900  Not Collective
901 
902  Input Parameter:
903 .  pc - the preconditioner context
904 
905  Output Parameter:
906 .  v - the flag
907 
908  Level: advanced
909 
910 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
911 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetUseCoarseDM_C" */
  PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(0);
}
917 
918 /*@
919  PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM`
920 
921  Not Collective
922 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_FALSE` to ignore any coarse `DM`
928 
929  Notes:
930  When you have specified to use a coarse `DM`, the communicator used to create the sub-KSP within `PCTELESCOPE`
931  will be that of the coarse `DM`. Hence the flags -pc_telescope_reduction_factor and
932  -pc_telescope_subcomm_type will no longer have any meaning.
933  It is required that the communicator associated with the parent (fine) and the coarse `DM` are of different sizes.
 An error will occur if the size of the communicator associated with the coarse `DM`
935  is the same as that of the parent `DM`.
936  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
937  This will be checked at the time the preconditioner is setup and an error will occur if
938  the coarse DM does not define a sub-communicator of that used by the parent DM.
939 
940  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
941  the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).
942 
943  Support is currently only provided for the case when you are using `KSPSetComputeOperators()`
944 
945  The user is required to compose a function with the parent DM to facilitate the transfer of fields (`Vec`) between the different decompositions defined by the fine and coarse `DM`s.
946  In the user code, this is achieved via
947 .vb
948    {
949      DM dm_fine;
950      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
951    }
952 .ve
953  The signature of the user provided field scatter method is
954 .vb
955    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
956 .ve
957  The user must provide support for both mode = `SCATTER_FORWARD` and mode = `SCATTER_REVERSE`.
958  `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
959 
960  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
961  of state variables between the fine and coarse `DM`s.
962  In the context of a finite element discretization, an example state variable might be
963  values associated with quadrature points within each element.
964  A user provided state scatter method is composed via
965 .vb
966    {
967      DM dm_fine;
968      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
969    }
970 .ve
971  The signature of the user provided state scatter method is
972 .vb
973    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
974 .ve
975  `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
976  The user is only required to support mode = `SCATTER_FORWARD`.
977  No assumption is made about the data type of the state variables.
978  These must be managed by the user and must be accessible from the `DM`.
979 
980  Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
981  associated with the sub-`KSP` residing within `PCTELESCOPE`.
982  In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
983  `KSPSetComputeOperators()` should be "similar" in type or origin.
984  Specifically the following rules are used to infer what context on the sub-`KSP` should be.
985 
986  First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
987  Note that the special case of a `DMSHELL` context is queried.
988 
989 .vb
990    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
991    DMGetApplicationContext(dm_fine,&dmfine_appctx);
992    DMShellGetContext(dm_fine,&dmfine_shellctx);
993 
994    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
995    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
996 .ve
997 
998  The following rules are then enforced:
999 
1000  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1001  `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,NULL);
1002 
1003  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1004  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1005  `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1006 
1007  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1008  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1009  `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1010 
1011  If neither of the above three tests passed, then `PCTELESCOPE` cannot safely determine what
1012  context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
1013  In this case, an additional mechanism is provided via a composed function which will return
1014  the actual context to be used. To use this feature you must compose the "getter" function
1015  with the coarse `DM`, e.g.
1016 .vb
1017    {
1018      DM dm_coarse;
1019      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1020    }
1021 .ve
1022  The signature of the user provided method is
1023 .vb
1024    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1025 .ve
1026 
1027  Level: advanced
1028 
1029 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
1030 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeSetUseCoarseDM_C" */
  PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(0);
}
1036 
1037 /*@
1038  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be used.
1039 
1040  Not Collective
1041 
1042  Input Parameter:
1043 .  pc - the preconditioner context
1044 
1045  Output Parameter:
1046 .  v - the flag
1047 
1048  Level: advanced
1049 
1050 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
1051 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetIgnoreKSPComputeOperators_C" */
  PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(0);
}
1057 
1058 /*@
1059  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore `KSPComputeOperators()`.
1060 
1061  Not Collective
1062 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_TRUE` to ignore the method (if defined) set via `KSPSetComputeOperators()` on pc
1068 
1069  Level: advanced
1070 
1071 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1072 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeSetIgnoreKSPComputeOperators_C" */
  PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(0);
}
1078 
1079 /*@
1080  PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.
1081 
1082  Not Collective
1083 
1084  Input Parameter:
1085 .  pc - the preconditioner context
1086 
1087  Output Parameter:
1088 .  subdm - The re-partitioned DM
1089 
1090  Level: advanced
1091 
1092 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1093 @*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetDM_C" */
  PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
  PetscFunctionReturn(0);
}
1099 
1100 /*@
1101  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1102 
1103  Logically Collective
1104 
1105  Input Parameters:
1106 +  pc - the preconditioner context
1107 -  subcommtype - the subcommunicator type (see `PetscSubcommType`)
1108 
1109  Level: advanced
1110 
1111 .seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
1112 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeSetSubcommType_C" */
  PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
  PetscFunctionReturn(0);
}
1118 
1119 /*@
1120  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1121 
1122  Not Collective
1123 
1124  Input Parameter:
1125 .  pc - the preconditioner context
1126 
1127  Output Parameter:
1128 .  subcommtype - the subcommunicator type (see `PetscSubcommType`)
1129 
1130  Level: advanced
1131 
1132 .seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
1133 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype) {
  PetscFunctionBegin;
  /* Forward to the implementation method composed with pc under "PCTelescopeGetSubcommType_C" */
  PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
  PetscFunctionReturn(0);
}
1139 
1140 /*MC
1141    PCTELESCOPE - Runs a `KSP` solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1142 
1143    Options Database Keys:
1144 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1145 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1146 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1147 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-KSP.
1148 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.
1149 
1150    Level: advanced
1151 
1152    Notes:
1153    Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
1154    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner `PC`.
1155    The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
1156    sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
1157    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1158    Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM`.
1159 
1160    The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.
1161 
1162    There are three setup mechanisms for `PCTELESCOPE`. Features support by each type are described below.
1163    In the following, we will refer to the operators B and B', these are the Bmat provided to the `KSP` on the
1164    communicators c and c' respectively.
1165 
1166    [1] Default setup
1167    The sub-communicator c' is created via `PetscSubcommCreate()`.
1168    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1170    No support is provided for `KSPSetComputeOperators()`.
1171    Currently there is no support for the flag -pc_use_amat.
1172 
1173    [2] `DM` aware setup
1174    If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
1175    c' is created via `PetscSubcommCreate()`.
1176    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
1177    Currently only support for re-partitioning a `DMDA` is provided.
1178    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1180    Support is provided for `KSPSetComputeOperators()`. The user provided function and context is propagated to the sub `KSP`.
1181    This is fragile since the user must ensure that their user context is valid for use on c'.
1182    Currently there is no support for the flag -pc_use_amat.
1183 
1184    [3] Coarse `DM` setup
1185    If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
1186    `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
1187    The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
1188    `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1189    The intention of this setup type is that` PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, say as would be
1190    available with using multi-grid on unstructured meshes.
1191    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1192    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1194    There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
1195    The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1196    Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
1197    Currently there is no support for the flag -pc_use_amat.
1198    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
1199    Further information about the user-provided methods required by this setup type are described here `PCTelescopeSetUseCoarseDM()`.
1200 
1201    Developer Notes:
1202    During `PCSetup()`, the B operator is scattered onto c'.
1203    Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1204    Then, `KSPSolve()` is executed on the c' communicator.
1205 
1206    The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
1207    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub `KSP` on only the ranks within the communicator which have a color equal to zero.
1208 
1209    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1210    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1211    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1212 
1213    The telescoping preconditioner can re-partition an attached DM if it is a `DMDA` (2D or 3D -
1214    support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
1215    and this new `DM` is attached the sub `KSP`. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. `DMPLEX`). The user can supply a flag to ignore attached DMs.
1217    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1218 
1219    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1220 
1221    When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1222    into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
1223    locally (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`
1224 
1225    Limitations/improvements include the following.
1226    `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
1227    A unified mechanism to query for user contexts as required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()`.
1228 
1229    The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
1230    and performing P^T.A.P. Possibly it might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
1231    `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1232    consistent manner.
1233 
1234    Mapping of vectors (default setup mode) is performed in the following way.
1235    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1236    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1237    We perform the scatter to the sub-communicator in the following way.
1238    [1] Given a vector x defined on communicator c
1239 
1240 .vb
1241    rank(c)  local values of x
1242    ------- ----------------------------------------
1243         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1244         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1245         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1246         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1247 .ve
1248 
1249    scatter into xtmp defined also on comm c, so that we have the following values
1250 
1251 .vb
1252    rank(c)  local values of xtmp
1253    ------- ----------------------------------------------------------------------------
1254         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1255         1   [ ]
1256         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1257         3   [ ]
1258 .ve
1259 
1260    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1261 
1262    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1263    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1264 
1265 .vb
1266    rank(c')  local values of xred
1267    -------- ----------------------------------------------------------------------------
1268          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1269          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1270 .ve
1271 
1272   Contributed by Dave May
1273 
1274   Reference:
1275   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1276 
1277 .seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1278 M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc) {
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the implementation context and establish the defaults:
     interlaced subcomm, reduction factor 1 (no reduction), DM handling enabled. */
  PetscCall(PetscNewLog(pc, &sred));
  sred->psubcomm                   = NULL;
  sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm                    = MPI_COMM_NULL;
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm              = PETSC_FALSE;
  pc->data                         = (void *)sred;

  /* Install the PC method table; applytranspose is not supported by this PC type. */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Default setup pipeline; PCSetUp_Telescope may swap these for the DM-aware
     or coarse-DM variants depending on what is attached to the PC. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Compose the type-specific methods dispatched to by the public PCTelescopeXxx() API. */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
  PetscFunctionReturn(0);
}
1321