#include <petsc/private/petscimpl.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/
#include <petscdm.h>  /*I "petscdm.h" I*/
#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
                               "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
                               "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
                               "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
                               "  series    = {PASC '16},\n"
                               "  isbn      = {978-1-4503-4126-4},\n"
                               "  location  = {Lausanne, Switzerland},\n"
                               "  pages     = {5:1--5:12},\n"
                               "  articleno = {5},\n"
                               "  numpages  = {12},\n"
                               "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
                               "  doi       = {10.1145/2929908.2929913},\n"
                               "  acmid     = {2929913},\n"
                               "  publisher = {ACM},\n"
                               "  address   = {New York, NY, USA},\n"
                               "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
                               "  year      = {2016}\n"
                               "}\n";
/*
 default setup mode

 [1a] scatter to (FORWARD)
 x(comm) -> xtmp(comm)
 [1b] local copy (to) ranks with color = 0
 xred(subcomm) <- xtmp

 [2] solve on sub KSP to obtain yred(subcomm)

 [3a] local copy (from) ranks with color = 0
 yred(subcomm) -> xtmp
 [3b] scatter from (REVERSE)
 xtmp(comm) -> y(comm)
*/

/*
  Collective on comm_f
  Notes
   * Using comm_f = MPI_COMM_NULL will result in an error.
   * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
   * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid) {
  PetscInt     valid = 1;
  MPI_Group    group_f, group_c;
  PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
  PetscMPIInt *ranks_f, *ranks_c;

  PetscFunctionBegin;
  PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");

  PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));

  PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f, &ranks_f));
  PetscCall(PetscMalloc1(size_c, &ranks_c));
  for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k = 0; k < size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
    for (k = 0; k < size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) count++;
    }
  }
  if (count == size_f) valid = 0;

  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
  PetscFunctionReturn(0);
}
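
/*
  Example usage (a minimal sketch, not part of the PETSc source): one way to build a
  candidate sub-communicator and validate it before handing it to PCTELESCOPE. The
  split criterion (even world ranks) is purely illustrative.

    MPI_Comm    comm_c = MPI_COMM_NULL;
    PetscMPIInt rank;
    PetscBool   isvalid;
    int         color;

    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    color = (rank % 2 == 0) ? 0 : MPI_UNDEFINED; (ranks with color MPI_UNDEFINED receive MPI_COMM_NULL)
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, color, rank, &comm_c));
    PetscCall(PCTelescopeTestValidSubcomm(PETSC_COMM_WORLD, comm_c, &isvalid));
*/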

DM private_PCTelescopeGetSubDM(PC_Telescope sred) {
  DM subdm = NULL;

  if (!PCTelescope_isActiveRank(sred)) {
    subdm = NULL;
  } else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT: subdm = NULL; break;
    case TELESCOPE_DMDA: subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart; break;
    case TELESCOPE_DMPLEX: subdm = NULL; break;
    case TELESCOPE_COARSEDM:
      if (sred->ksp) KSPGetDM(sred->ksp, &subdm);
      break;
    }
  }
  return (subdm);
}

PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred) {
  PetscInt   m, M, bs, st, ed;
  Vec        x, xred, yred, xtmp;
  Mat        B;
  MPI_Comm   comm, subcomm;
  VecScatter scatter;
  IS         isin;
  VecType    vectype;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &M, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetVecType(B, &vectype));

  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecCreate(subcomm, &xred));
    PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
    PetscCall(VecSetBlockSize(xred, bs));
    PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
    PetscCall(VecSetFromOptions(xred));
    PetscCall(VecGetLocalSize(xred, &m));
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));

  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, vectype));

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));

  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(0);
}

PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A) {
  MPI_Comm comm, subcomm;
  Mat      Bred, B;
  PetscInt nr, nc, bs;
  IS       isrow, iscol;
  Mat      Blocal, *_Blocal;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &nr, &nc));
  isrow = sred->isin;
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
  PetscCall(ISSetIdentity(iscol));
  PetscCall(MatGetBlockSizes(B, NULL, &bs));
  PetscCall(ISSetBlockSize(iscol, bs));
  PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
  PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
  Blocal = *_Blocal;
  PetscCall(PetscFree(_Blocal));
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) Bred = *A;

    PetscCall(MatGetSize(Blocal, &mm, NULL));
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
  }
  *A = Bred;
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&Blocal));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace) {
  PetscBool  has_const;
  const Vec *vecs;
  Vec       *sub_vecs = NULL;
  PetscInt   i, k, n = 0;
  MPI_Comm   subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries */
  for (k = 0; k < n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt           st, ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
        PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
        for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
        PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
    PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat) {
  Mat B;

  PetscFunctionBegin;
  PetscCall(PCGetOperators(pc, NULL, &B));
  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace, sub_nullspace;
    PetscCall(MatGetNullSpace(B, &nullspace));
    if (nullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nullspace));
      }
    }
  }
  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace, sub_nearnullspace;
    PetscCall(MatGetNearNullSpace(B, &nearnullspace));
    if (nearnullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
      }
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer) {
  PC_Telescope sred = (PC_Telescope)pc->data;
  PetscBool    iascii, isstring;
  PetscViewer  subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    {
      MPI_Comm    comm, subcomm;
      PetscMPIInt comm_size, subcomm_size;
      DM          dm = NULL, subdm = NULL;

      PetscCall(PCGetDM(pc, &dm));
      subdm = private_PCTelescopeGetSubDM(sred);

      if (sred->psubcomm) {
        comm    = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        PetscCallMPI(MPI_Comm_size(comm, &comm_size));
        PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED: PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype])); break;
        case PETSC_SUBCOMM_CONTIGUOUS: PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm type = %s\n", PetscSubcommTypes[sred->subcommtype])); break;
        default: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
        }
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }

      PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(PetscViewerASCIIPushTab(subviewer));

        if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
        if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n")); break;
        case TELESCOPE_DMDA:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
          PetscCall(DMView_DA_Short(subdm, subviewer));
          break;
        case TELESCOPE_DMPLEX: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n")); break;
        case TELESCOPE_COARSEDM: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n")); break;
        }

        if (dm) {
          PetscObject obj = (PetscObject)dm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
        }

        PetscCall(KSPView(sred->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Telescope(PC pc) {
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm, subcomm = MPI_COMM_NULL;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm, same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PetscCall(PCGetDM(pc, &dm));
    if (dm) has_dm = PETSC_TRUE;
    if (has_dm) {
      /* check for dmda */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
        sr_type = TELESCOPE_DMPLEX;
      }

      if (sred->use_coarse_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default: SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
        PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
        PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm, dm_coarse_partition          = NULL;
      MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
      PetscBool   isvalidsubcomm;

      PetscCall(PCGetDM(pc, &dm));
      comm_fine = PetscObjectComm((PetscObject)dm);
      PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
      if (dm_coarse_partition) cnt = 1;
      PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
      PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");

      PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
      }

      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
      PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
      PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      PetscCall(KSPCreate(subcomm, &sred->ksp));
      PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
      PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
    if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
  } else {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
    if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y) {
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, xred, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));

  /* copy vector entries into xred */
  PetscCall(VecGetArrayRead(xtmp, &x_array));
  if (xred) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(VecGetArray(xred, &LA_xred));
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    PetscCall(VecRestoreArray(xred, &LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp, xred, yred));
    PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
  }
  /* return vector */
  PetscCall(VecGetArray(xtmp, &array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred, &st, &ed));
    PetscCall(VecGetArrayRead(yred, &LA_yred));
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    PetscCall(VecRestoreArrayRead(yred, &LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp, &array));
  PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) {
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  const PetscScalar *x_array;
  PetscBool          default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));

    /* copy vector entries into xred */
    PetscCall(VecGetArrayRead(xtmp, &x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred, &st, &ed));
      PetscCall(VecGetArray(yred, &LA_yred));
      for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
      PetscCall(VecRestoreArray(yred, &LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  }

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc, x, y));

  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Telescope(PC pc) {
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(ISDestroy(&sred->isin));
  PetscCall(VecScatterDestroy(&sred->scatter));
  PetscCall(VecDestroy(&sred->xred));
  PetscCall(VecDestroy(&sred->yred));
  PetscCall(VecDestroy(&sred->xtmp));
  PetscCall(MatDestroy(&sred->Bred));
  PetscCall(KSPReset(sred->ksp));
  if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Telescope(PC pc) {
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Telescope(pc));
  PetscCall(KSPDestroy(&sred->ksp));
  PetscCall(PetscSubcommDestroy(&sred->psubcomm));
  PetscCall(PetscFree(sred->dm_ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
  PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
  if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
  PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
  PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/* PCTelescope implementation-specific APIs */

static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (ksp) *ksp = red->ksp;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (subcommtype) *subcommtype = red->subcommtype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype) {
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
  red->subcommtype = subcommtype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (fact) *fact = red->redfactor;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscMPIInt  size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
  PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
  red->redfactor = fact;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->ignore_dm;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->ignore_dm = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->use_coarse_dm;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->use_coarse_dm = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->ignore_kspcomputeoperators;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->ignore_kspcomputeoperators = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm) {
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  *dm = private_PCTelescopeGetSubDM(red);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the `KSP` defined on the smaller set of processes

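 Example Usage:
 A minimal sketch (error checking omitted). It assumes `PCSetUp()` has already been
 called, since the sub-`KSP` is created during setup; subksp is NULL on the MPI ranks
 which do not belong to the sub-communicator.
.vb
   KSP subksp;

   PCTelescopeGetKSP(pc, &subksp);
   if (subksp) KSPSetType(subksp, KSPGMRES);
.ve
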
 Level: advanced

.seealso: `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp) {
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI ranks has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact) {
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI ranks will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

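 Example Usage:
 A minimal sketch; the same effect is available from the command line via
 -pc_telescope_reduction_factor 4.
.vb
   PCSetType(pc, PCTELESCOPE);
   PCTelescopeSetReductionFactor(pc, 4); /* e.g. 64 ranks -> a sub-communicator with 16 ranks */
.ve
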
 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact) {
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating whether any `DM` attached to the `PC` will be ignored.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v) {
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC`.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_TRUE` to ignore any `DM`

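 Example Usage:
 A minimal sketch; the same effect is available via the option -pc_telescope_ignore_dm.
.vb
   PCTelescopeSetIgnoreDM(pc, PETSC_TRUE); /* repartition the operator only, not the DM */
.ve
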
 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v) {
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(0);
}

894 
895 /*@
896  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to `DM` associated with the `PC` will be used.
897 
898  Not Collective
899 
900  Input Parameter:
901 .  pc - the preconditioner context
902 
903  Output Parameter:
904 .  v - the flag
905 
906  Level: advanced
907 
908 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
909 @*/
910 PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v) {
911   PetscFunctionBegin;
912   PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
913   PetscFunctionReturn(0);
914 }
915 
/*@
 PCTelescopeSetUseCoarseDM - Set a flag indicating that the `DM` attached to the `PC` should be queried for a coarse `DM`

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_FALSE` to ignore any coarse `DM`

 Notes:
 When you have specified to use a coarse `DM`, the communicator used to create the sub-`KSP` within `PCTELESCOPE`
 will be that of the coarse `DM`. Hence the flags -pc_telescope_reduction_factor and
 -pc_telescope_subcomm_type will no longer have any meaning.
 It is required that the communicators associated with the parent (fine) `DM` and the coarse `DM` are of different sizes.
 An error will occur if the size of the communicator associated with the coarse `DM`
 is the same as that of the parent `DM`.
 Furthermore, it is required that the communicator on the coarse `DM` is a sub-communicator of the parent.
 This will be checked at the time the preconditioner is set up and an error will occur if
 the coarse `DM` does not define a sub-communicator of that used by the parent `DM`.

 The particular Telescope setup invoked when using a coarse `DM` is agnostic with respect to the type of
 the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).

 Support is currently only provided for the case when you are using `KSPSetComputeOperators()`.

 The user is required to compose a function with the parent `DM` to facilitate the transfer of fields (`Vec`) between the different decompositions defined by the fine and coarse `DM`s.
 In the user code, this is achieved via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   }
.ve
 The signature of the user-provided field scatter method is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
.ve
 The user must provide support for both mode = `SCATTER_FORWARD` and mode = `SCATTER_REVERSE`.
 `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
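
 For illustration only, a skeleton of such a method might look like the following; the
 name and the interior transfer logic are placeholders which the user must supply, and
 dm_coarse / x_coarse may be NULL on ranks which do not belong to the coarse communicator.
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine, Vec x_fine, ScatterMode mode, DM dm_coarse, Vec x_coarse)
   {
     PetscFunctionBegin;
     if (mode == SCATTER_FORWARD) {
       /* move field values from x_fine (fine decomposition) into x_coarse */
     } else if (mode == SCATTER_REVERSE) {
       /* move field values from x_coarse back into x_fine */
     }
     PetscFunctionReturn(0);
   }
.ve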

 Optionally, the user may also compose a function with the parent `DM` to facilitate the transfer
 of state variables between the fine and coarse `DM`s.
 In the context of a finite element discretization, an example state variable might be
 values associated with quadrature points within each element.
 A user-provided state scatter method is composed via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
   }
.ve
 The signature of the user-provided state scatter method is
.vb
   PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
.ve
 `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
 The user is only required to support mode = `SCATTER_FORWARD`.
 No assumption is made about the data type of the state variables.
 These must be managed by the user and must be accessible from the `DM`.

 Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
 associated with the sub-`KSP` residing within `PCTELESCOPE`.
 In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
 `KSPSetComputeOperators()` should be "similar" in type or origin.
 Specifically the following rules are used to infer what the context on the sub-`KSP` should be.

 First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
 Note that the special case of a `DMSHELL` context is queried.

.vb
   DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
   DMGetApplicationContext(dm_fine,&dmfine_appctx);
   DMShellGetContext(dm_fine,&dmfine_shellctx);

   DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
   DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
.ve

 The following rules are then enforced:

 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
 `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,NULL);

 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
 check that dmcoarse_appctx is also non-NULL. If this is true, then:
 `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
 check that dmcoarse_shellctx is also non-NULL. If this is true, then:
 `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

 If none of the above three tests passed, then `PCTELESCOPE` cannot safely determine what
 context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
 In this case, an additional mechanism is provided via a composed function which will return
 the actual context to be used. To use this feature you must compose the "getter" function
 with the coarse `DM`, e.g.
.vb
   {
     DM dm_coarse;
     PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
   }
.ve
 The signature of the user-provided method is
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
.ve
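
 For example, one minimal (illustrative) getter simply hands back the application context
 attached to the coarse `DM`:
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse, void **your_kspcontext)
   {
     PetscFunctionBegin;
     PetscCall(DMGetApplicationContext(dm_coarse, your_kspcontext));
     PetscFunctionReturn(0);
   }
.ve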

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeGetUseCoarseDM()`
@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v) {
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating whether the method set via `KSPSetComputeOperators()` will be ignored.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v) {
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore the method set via `KSPSetComputeOperators()`.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_TRUE` to ignore the method (if defined) set via `KSPSetComputeOperators()` on pc

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v) {
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - the re-partitioned `DM`

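 Example Usage:
 A sketch which assumes `PCSetUp()` has completed; subdm is NULL on MPI ranks which
 do not belong to the sub-communicator.
.vb
   DM subdm;

   PCTelescopeGetDM(pc, &subdm);
   if (subdm) DMView(subdm, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)subdm)));
.ve
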
 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm) {
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetSubcommType - Set the subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see `PetscSubcommType`)

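 Example Usage:
 A minimal sketch; equivalently, use the option -pc_telescope_subcomm_type contiguous.
.vb
   PCTelescopeSetSubcommType(pc, PETSC_SUBCOMM_CONTIGUOUS);
.ve
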
 Level: advanced

.seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype) {
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see `PetscSubcommType`)

 Level: advanced

.seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype) {
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
  PetscFunctionReturn(0);
}

/*MC
   PCTELESCOPE - Runs a `KSP` solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.

   Options Database Keys:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm - flag to indicate whether an attached `DM` should be ignored.
.  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. See `PetscSubcomm` for more information.
.  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-`KSP`.
-  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.
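
   Example Usage:
   An illustrative command line (the application name and inner solver choices are placeholders);
   options for the sub-`KSP` and its inner `PC` are prefixed with -telescope_.
.vb
   mpiexec -n 64 ./your_app -ksp_type cg -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_pc_type gamg
.ve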

   Level: advanced

   Notes:
   Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI ranks than c.
   The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
   sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
   This means there will be MPI ranks which will be idle during the application of this preconditioner.
   Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM`.

   The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.

   There are three setup mechanisms for `PCTELESCOPE`. Features supported by each type are described below.
   In the following, we will refer to the operators B and B'; these are the Bmat provided to the `KSP` on the
   communicators c and c' respectively.

   [1] Default setup
   The sub-communicator c' is created via `PetscSubcommCreate()`.
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   No support is provided for `KSPSetComputeOperators()`.
   Currently there is no support for the flag -pc_use_amat.

   [2] `DM` aware setup
   If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
   c' is created via `PetscSubcommCreate()`.
   Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
   Currently only support for re-partitioning a `DMDA` is provided.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   Support is provided for `KSPSetComputeOperators()`. The user-provided function and context are propagated to the sub `KSP`.
   This is fragile since the user must ensure that their user context is valid for use on c'.
   Currently there is no support for the flag -pc_use_amat.

   [3] Coarse `DM` setup
   If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
   `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
   The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
   `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
   The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, say as would be
   available when using multi-grid on unstructured meshes.
   This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
   The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
   Currently there is no support for the flag -pc_use_amat.
   This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
   further information about the user-provided methods required by this setup type is given in `PCTelescopeSetUseCoarseDM()`.

   Developer Notes:
   During `PCSetUp()`, the B operator is scattered onto c'.
   Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then, `KSPSolve()` is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
   creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub `KSP` on only the ranks within the communicator which have a color equal to zero.

   The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
   In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
   a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')

   The telescoping preconditioner can re-partition an attached `DM` if it is a `DMDA` (2D or 3D -
   support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
   and this new `DM` is attached to the sub `KSP`. The design of telescope is such that it should be possible to extend support
   for re-partitioning other `DM` types (e.g. `DMPLEX`). The user can supply a flag to ignore attached `DM`s.
   Alternatively, user-provided re-partitioned `DM`s can be used via -pc_telescope_use_coarse_dm.

   With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.

   When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`

   Limitations/improvements include the following.
   `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
   A unified mechanism to query for user contexts as required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()` would also be beneficial.

   The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
   `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.

   Mapping of vectors (default setup mode) is performed in the following way.
   Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be ranks 0 and 2.
   We perform the scatter to the sub-communicator in the following way.
   [1] Given a vector x defined on communicator c

.vb
   rank(c)  local values of x
   ------- ----------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
        1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
        3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

   scatter into xtmp defined also on comm c, so that we have the following values

.vb
   rank(c)  local values of xtmp
   ------- ----------------------------------------------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        1   [ ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
        3   [ ]
.ve

   The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.

   [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in c which have color = 0.

.vb
   rank(c')  local values of xred
   -------- ----------------------------------------------------------------------------
         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
         1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

  Contributed by Dave May

  Reference:
  Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913

.seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc) {
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  PetscCall(PetscNew(&sred));
  sred->psubcomm                   = NULL;
  sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm                    = MPI_COMM_NULL;
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm              = PETSC_FALSE;
  pc->data                         = (void *)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
  PetscFunctionReturn(0);
}
1319