1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h>  /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
8 static PetscBool  cited      = PETSC_FALSE;
9 static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
10                                "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
11                                "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
12                                "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
13                                "  series    = {PASC '16},\n"
14                                "  isbn      = {978-1-4503-4126-4},\n"
15                                "  location  = {Lausanne, Switzerland},\n"
16                                "  pages     = {5:1--5:12},\n"
17                                "  articleno = {5},\n"
18                                "  numpages  = {12},\n"
19                                "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
20                                "  doi       = {10.1145/2929908.2929913},\n"
21                                "  acmid     = {2929913},\n"
22                                "  publisher = {ACM},\n"
23                                "  address   = {New York, NY, USA},\n"
24                                "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
25                                "  year      = {2016}\n"
26                                "}\n";
27 
28 /*
29  default setup mode
30 
31  [1a] scatter to (FORWARD)
32  x(comm) -> xtmp(comm)
33  [1b] local copy (to) ranks with color = 0
34  xred(subcomm) <- xtmp
35 
36  [2] solve on sub KSP to obtain yred(subcomm)
37 
38  [3a] local copy (from) ranks with color = 0
39  yred(subcomm) -> xtmp
40  [3b] scatter from (REVERSE)
41  xtmp(comm) -> y(comm)
42 */
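/* These steps are implemented in PCApply_Telescope(), using the scatter, xtmp, xred and yred objects created in PCTelescopeSetUp_default(). */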
43 
44 /*
45   Collective on comm_f
46   Notes:
47    * Using comm_f = MPI_COMM_NULL will result in an error.
48    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are MPI_COMM_NULL the subcomm is not valid.
49    * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
50 */
51 PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid) {
52   PetscInt     valid = 1;
53   MPI_Group    group_f, group_c;
54   PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
55   PetscMPIInt *ranks_f, *ranks_c;
56 
57   PetscFunctionBegin;
58   PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");
59 
60   PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
61   if (comm_c != MPI_COMM_NULL) { PetscCallMPI(MPI_Comm_group(comm_c, &group_c)); }
62 
63   PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
64   if (comm_c != MPI_COMM_NULL) { PetscCallMPI(MPI_Comm_size(comm_c, &size_c)); }
65 
66   /* check not all comm_c's are NULL */
67   size_c_sum = size_c;
68   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
69   if (size_c_sum == 0) valid = 0;
70 
71   /* check we can map at least 1 rank in comm_c to comm_f */
72   PetscCall(PetscMalloc1(size_f, &ranks_f));
73   PetscCall(PetscMalloc1(size_c, &ranks_c));
74   for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
75   for (k = 0; k < size_c; k++) ranks_c[k] = k;
76 
77   /*
78    MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
79    I do not want the code to terminate immediately if this occurs, rather I want to throw
80    the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
81    that comm_c is not a valid sub-communicator.
82    Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
83   */
84   count = 0;
85   if (comm_c != MPI_COMM_NULL) {
86     (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
87     for (k = 0; k < size_f; k++) {
88       if (ranks_f[k] == MPI_UNDEFINED) { count++; }
89     }
90   }
91   if (count == size_f) valid = 0;
92 
93   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
94   if (valid == 1) *isvalid = PETSC_TRUE;
95   else *isvalid = PETSC_FALSE;
96 
97   PetscCall(PetscFree(ranks_f));
98   PetscCall(PetscFree(ranks_c));
99   PetscCallMPI(MPI_Group_free(&group_f));
100   if (comm_c != MPI_COMM_NULL) { PetscCallMPI(MPI_Group_free(&group_c)); }
101   PetscFunctionReturn(0);
102 }
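
/*
  Usage sketch (illustrative only; it mirrors the call made in PCSetUp_Telescope() below, where dm_fine
  and dm_coarse stand for the DM attached to the PC and its coarse DM respectively):

    PetscBool isvalid;
    PetscCall(PCTelescopeTestValidSubcomm(PetscObjectComm((PetscObject)dm_fine), PetscObjectComm((PetscObject)dm_coarse), &isvalid));
    PetscCheck(isvalid, PetscObjectComm((PetscObject)dm_fine), PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of the fine DM communicator");
*/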
103 
104 DM private_PCTelescopeGetSubDM(PC_Telescope sred) {
105   DM subdm = NULL;
106 
107   if (!PCTelescope_isActiveRank(sred)) {
108     subdm = NULL;
109   } else {
110     switch (sred->sr_type) {
111     case TELESCOPE_DEFAULT: subdm = NULL; break;
112     case TELESCOPE_DMDA: subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart; break;
113     case TELESCOPE_DMPLEX: subdm = NULL; break;
114     case TELESCOPE_COARSEDM:
115       if (sred->ksp) { KSPGetDM(sred->ksp, &subdm); }
116       break;
117     }
118   }
119   return (subdm);
120 }
121 
122 PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred) {
123   PetscInt   m, M, bs, st, ed;
124   Vec        x, xred, yred, xtmp;
125   Mat        B;
126   MPI_Comm   comm, subcomm;
127   VecScatter scatter;
128   IS         isin;
129   VecType    vectype;
130 
131   PetscFunctionBegin;
132   PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
133   comm    = PetscSubcommParent(sred->psubcomm);
134   subcomm = PetscSubcommChild(sred->psubcomm);
135 
136   PetscCall(PCGetOperators(pc, NULL, &B));
137   PetscCall(MatGetSize(B, &M, NULL));
138   PetscCall(MatGetBlockSize(B, &bs));
139   PetscCall(MatCreateVecs(B, &x, NULL));
140   PetscCall(MatGetVecType(B, &vectype));
141 
142   xred = NULL;
143   m    = 0;
144   if (PCTelescope_isActiveRank(sred)) {
145     PetscCall(VecCreate(subcomm, &xred));
146     PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
147     PetscCall(VecSetBlockSize(xred, bs));
148     PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
149     PetscCall(VecSetFromOptions(xred));
150     PetscCall(VecGetLocalSize(xred, &m));
151   }
152 
153   yred = NULL;
154   if (PCTelescope_isActiveRank(sred)) { PetscCall(VecDuplicate(xred, &yred)); }
155 
156   PetscCall(VecCreate(comm, &xtmp));
157   PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
158   PetscCall(VecSetBlockSize(xtmp, bs));
159   PetscCall(VecSetType(xtmp, vectype));
160 
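  /* Build the index set defining which entries of x each rank will gather into xtmp: ranks which are
     active on the sub-communicator gather the block matching the ownership range of xred, while
     inactive ranks gather nothing (a zero length stride). */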
161   if (PCTelescope_isActiveRank(sred)) {
162     PetscCall(VecGetOwnershipRange(xred, &st, &ed));
163     PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
164   } else {
165     PetscCall(VecGetOwnershipRange(x, &st, &ed));
166     PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
167   }
168   PetscCall(ISSetBlockSize(isin, bs));
169 
170   PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
171 
172   sred->isin    = isin;
173   sred->scatter = scatter;
174   sred->xred    = xred;
175   sred->yred    = yred;
176   sred->xtmp    = xtmp;
177   PetscCall(VecDestroy(&x));
178   PetscFunctionReturn(0);
179 }
180 
181 PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A) {
182   MPI_Comm comm, subcomm;
183   Mat      Bred, B;
184   PetscInt nr, nc, bs;
185   IS       isrow, iscol;
186   Mat      Blocal, *_Blocal;
187 
188   PetscFunctionBegin;
189   PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
190   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
191   subcomm = PetscSubcommChild(sred->psubcomm);
192   PetscCall(PCGetOperators(pc, NULL, &B));
193   PetscCall(MatGetSize(B, &nr, &nc));
194   isrow = sred->isin;
195   PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
196   PetscCall(ISSetIdentity(iscol));
197   PetscCall(MatGetBlockSizes(B, NULL, &bs));
198   PetscCall(ISSetBlockSize(iscol, bs));
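  /* Extract the rows of B indexed by isrow (all columns) as a sequential matrix on every rank, then
     concatenate the local pieces owned by the active ranks into the parallel operator Bred defined on
     the sub-communicator. */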
199   PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
200   PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
201   Blocal = *_Blocal;
202   PetscCall(PetscFree(_Blocal));
203   Bred = NULL;
204   if (PCTelescope_isActiveRank(sred)) {
205     PetscInt mm;
206 
207     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
208 
209     PetscCall(MatGetSize(Blocal, &mm, NULL));
210     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
211   }
212   *A = Bred;
213   PetscCall(ISDestroy(&iscol));
214   PetscCall(MatDestroy(&Blocal));
215   PetscFunctionReturn(0);
216 }
217 
218 static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace) {
219   PetscBool  has_const;
220   const Vec *vecs;
221   Vec       *sub_vecs = NULL;
222   PetscInt   i, k, n = 0;
223   MPI_Comm   subcomm;
224 
225   PetscFunctionBegin;
226   subcomm = PetscSubcommChild(sred->psubcomm);
227   PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));
228 
229   if (PCTelescope_isActiveRank(sred)) {
230     if (n) { PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs)); }
231   }
232 
233   /* copy entries */
234   for (k = 0; k < n; k++) {
235     const PetscScalar *x_array;
236     PetscScalar       *LA_sub_vec;
237     PetscInt           st, ed;
238 
239     /* pull in vector x->xtmp */
240     PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
241     PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
242     if (sub_vecs) {
243       /* copy vector entries into xred */
244       PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
245       if (sub_vecs[k]) {
246         PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
247         PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
248         for (i = 0; i < ed - st; i++) { LA_sub_vec[i] = x_array[i]; }
249         PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
250       }
251       PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
252     }
253   }
254 
255   if (PCTelescope_isActiveRank(sred)) {
256     /* create new (near) nullspace for redundant object */
257     PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
258     PetscCall(VecDestroyVecs(n, &sub_vecs));
259     PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
260     PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat) {
266   Mat B;
267 
268   PetscFunctionBegin;
269   PetscCall(PCGetOperators(pc, NULL, &B));
270   /* Propagate the nullspace if it exists */
271   {
272     MatNullSpace nullspace, sub_nullspace;
273     PetscCall(MatGetNullSpace(B, &nullspace));
274     if (nullspace) {
275       PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
276       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
277       if (PCTelescope_isActiveRank(sred)) {
278         PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
279         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
280       }
281     }
282   }
283   /* Propagate the near nullspace if it exists */
284   {
285     MatNullSpace nearnullspace, sub_nearnullspace;
286     PetscCall(MatGetNearNullSpace(B, &nearnullspace));
287     if (nearnullspace) {
288       PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
289       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
290       if (PCTelescope_isActiveRank(sred)) {
291         PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
292         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
293       }
294     }
295   }
296   PetscFunctionReturn(0);
297 }
298 
299 static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer) {
300   PC_Telescope sred = (PC_Telescope)pc->data;
301   PetscBool    iascii, isstring;
302   PetscViewer  subviewer;
303 
304   PetscFunctionBegin;
305   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
306   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
307   if (iascii) {
308     {
309       MPI_Comm    comm, subcomm;
310       PetscMPIInt comm_size, subcomm_size;
311       DM          dm = NULL, subdm = NULL;
312 
313       PetscCall(PCGetDM(pc, &dm));
314       subdm = private_PCTelescopeGetSubDM(sred);
315 
316       if (sred->psubcomm) {
317         comm    = PetscSubcommParent(sred->psubcomm);
318         subcomm = PetscSubcommChild(sred->psubcomm);
319         PetscCallMPI(MPI_Comm_size(comm, &comm_size));
320         PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));
321 
322         PetscCall(PetscViewerASCIIPushTab(viewer));
323         PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
324         PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
325         switch (sred->subcommtype) {
326         case PETSC_SUBCOMM_INTERLACED: PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype])); break;
327         case PETSC_SUBCOMM_CONTIGUOUS: PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm type = %s\n", PetscSubcommTypes[sred->subcommtype])); break;
328         default: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
329         }
330         PetscCall(PetscViewerASCIIPopTab(viewer));
331       } else {
332         PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
333         subcomm = sred->subcomm;
334         if (!PCTelescope_isActiveRank(sred)) { subcomm = PETSC_COMM_SELF; }
335 
336         PetscCall(PetscViewerASCIIPushTab(viewer));
337         PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
338         PetscCall(PetscViewerASCIIPopTab(viewer));
339       }
340 
341       PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
342       if (PCTelescope_isActiveRank(sred)) {
343         PetscCall(PetscViewerASCIIPushTab(subviewer));
344 
345         if (dm && sred->ignore_dm) { PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n")); }
346         if (sred->ignore_kspcomputeoperators) { PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n")); }
347         switch (sred->sr_type) {
348         case TELESCOPE_DEFAULT: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n")); break;
349         case TELESCOPE_DMDA:
350           PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
351           PetscCall(DMView_DA_Short(subdm, subviewer));
352           break;
353         case TELESCOPE_DMPLEX: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n")); break;
354         case TELESCOPE_COARSEDM: PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n")); break;
355         }
356 
357         if (dm) {
358           PetscObject obj = (PetscObject)dm;
359           PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
360           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
361           if (obj->type_name) { PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name)); }
362           if (obj->name) { PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name)); }
363           if (obj->prefix) { PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix)); }
364           PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
365           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
366         } else {
367           PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
368         }
369         if (subdm) {
370           PetscObject obj = (PetscObject)subdm;
371           PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
372           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
373           if (obj->type_name) { PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name)); }
374           if (obj->name) { PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name)); }
375           if (obj->prefix) { PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix)); }
376           PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
377           PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
378         } else {
379           PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
380         }
381 
382         PetscCall(KSPView(sred->ksp, subviewer));
383         PetscCall(PetscViewerASCIIPopTab(subviewer));
384       }
385       PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
386     }
387   }
388   PetscFunctionReturn(0);
389 }
390 
391 static PetscErrorCode PCSetUp_Telescope(PC pc) {
392   PC_Telescope    sred = (PC_Telescope)pc->data;
393   MPI_Comm        comm, subcomm = MPI_COMM_NULL;
394   PCTelescopeType sr_type;
395 
396   PetscFunctionBegin;
397   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
398 
399   /* Determine type of setup/update */
400   if (!pc->setupcalled) {
401     PetscBool has_dm, same;
402     DM        dm;
403 
404     sr_type = TELESCOPE_DEFAULT;
405     has_dm  = PETSC_FALSE;
406     PetscCall(PCGetDM(pc, &dm));
407     if (dm) { has_dm = PETSC_TRUE; }
408     if (has_dm) {
409       /* check for dmda */
410       PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
411       if (same) {
412         PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
413         sr_type = TELESCOPE_DMDA;
414       }
415       /* check for dmplex */
416       PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
417       if (same) {
418         PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
419         sr_type = TELESCOPE_DMPLEX;
420       }
421 
422       if (sred->use_coarse_dm) {
423         PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
424         sr_type = TELESCOPE_COARSEDM;
425       }
426 
427       if (sred->ignore_dm) {
428         PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
429         sr_type = TELESCOPE_DEFAULT;
430       }
431     }
432     sred->sr_type = sr_type;
433   } else {
434     sr_type = sred->sr_type;
435   }
436 
437   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
438   switch (sr_type) {
439   case TELESCOPE_DEFAULT:
440     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
441     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
442     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
443     sred->pctelescope_reset_type              = NULL;
444     break;
445   case TELESCOPE_DMDA:
446     pc->ops->apply                            = PCApply_Telescope_dmda;
447     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
448     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
449     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
450     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
451     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
452     break;
453   case TELESCOPE_DMPLEX: SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
454   case TELESCOPE_COARSEDM:
455     pc->ops->apply                            = PCApply_Telescope_CoarseDM;
456     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
457     sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
458     sred->pctelescope_matcreate_type          = NULL;
459     sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
460     sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
461     break;
462   default: SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
463   }
464 
465   /* subcomm definition */
466   if (!pc->setupcalled) {
467     if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
468       if (!sred->psubcomm) {
469         PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
470         PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
471         PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
472         PetscCall(PetscLogObjectMemory((PetscObject)pc, sizeof(PetscSubcomm)));
473         sred->subcomm = PetscSubcommChild(sred->psubcomm);
474       }
475     } else { /* query PC for DM, check communicators */
476       DM          dm, dm_coarse_partition          = NULL;
477       MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
478       PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
479       PetscBool   isvalidsubcomm;
480 
481       PetscCall(PCGetDM(pc, &dm));
482       comm_fine = PetscObjectComm((PetscObject)dm);
483       PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
484       if (dm_coarse_partition) { cnt = 1; }
485       PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
486       PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");
487 
488       PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
489       if (dm_coarse_partition) {
490         comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
491         PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
492       }
493 
494       cs[0] = csize_fine;
495       cs[1] = csize_coarse_partition;
496       PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
497       PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");
498 
499       PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
500       PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
501       sred->subcomm = comm_coarse_partition;
502     }
503   }
504   subcomm = sred->subcomm;
505 
506   /* internal KSP */
507   if (!pc->setupcalled) {
508     const char *prefix;
509 
510     if (PCTelescope_isActiveRank(sred)) {
511       PetscCall(KSPCreate(subcomm, &sred->ksp));
512       PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
513       PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
514       PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)sred->ksp));
515       PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
516       PetscCall(PCGetOptionsPrefix(pc, &prefix));
517       PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
518       PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
519     }
520   }
521 
522   /* setup */
523   if (!pc->setupcalled && sred->pctelescope_setup_type) { PetscCall(sred->pctelescope_setup_type(pc, sred)); }
524   /* update */
525   if (!pc->setupcalled) {
526     if (sred->pctelescope_matcreate_type) { PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred)); }
527     if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
528   } else {
529     if (sred->pctelescope_matcreate_type) { PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred)); }
530   }
531 
532   /* common - no construction */
533   if (PCTelescope_isActiveRank(sred)) {
534     PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
535     if (pc->setfromoptionscalled && !pc->setupcalled) { PetscCall(KSPSetFromOptions(sred->ksp)); }
536   }
537   PetscFunctionReturn(0);
538 }
539 
540 static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y) {
541   PC_Telescope       sred = (PC_Telescope)pc->data;
542   Vec                xtmp, xred, yred;
543   PetscInt           i, st, ed;
544   VecScatter         scatter;
545   PetscScalar       *array;
546   const PetscScalar *x_array;
547 
548   PetscFunctionBegin;
549   PetscCall(PetscCitationsRegister(citation, &cited));
550 
551   xtmp    = sred->xtmp;
552   scatter = sred->scatter;
553   xred    = sred->xred;
554   yred    = sred->yred;
555 
556   /* pull in vector x->xtmp */
557   PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
558   PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
559 
560   /* copy vector entries into xred */
561   PetscCall(VecGetArrayRead(xtmp, &x_array));
562   if (xred) {
563     PetscScalar *LA_xred;
564     PetscCall(VecGetOwnershipRange(xred, &st, &ed));
565     PetscCall(VecGetArray(xred, &LA_xred));
566     for (i = 0; i < ed - st; i++) { LA_xred[i] = x_array[i]; }
567     PetscCall(VecRestoreArray(xred, &LA_xred));
568   }
569   PetscCall(VecRestoreArrayRead(xtmp, &x_array));
570   /* solve */
571   if (PCTelescope_isActiveRank(sred)) {
572     PetscCall(KSPSolve(sred->ksp, xred, yred));
573     PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
574   }
575   /* return vector */
576   PetscCall(VecGetArray(xtmp, &array));
577   if (yred) {
578     const PetscScalar *LA_yred;
579     PetscCall(VecGetOwnershipRange(yred, &st, &ed));
580     PetscCall(VecGetArrayRead(yred, &LA_yred));
581     for (i = 0; i < ed - st; i++) { array[i] = LA_yred[i]; }
582     PetscCall(VecRestoreArrayRead(yred, &LA_yred));
583   }
584   PetscCall(VecRestoreArray(xtmp, &array));
585   PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
586   PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
587   PetscFunctionReturn(0);
588 }
589 
590 static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) {
591   PC_Telescope       sred = (PC_Telescope)pc->data;
592   Vec                xtmp, yred;
593   PetscInt           i, st, ed;
594   VecScatter         scatter;
595   const PetscScalar *x_array;
596   PetscBool          default_init_guess_value;
597 
598   PetscFunctionBegin;
599   xtmp    = sred->xtmp;
600   scatter = sred->scatter;
601   yred    = sred->yred;
602 
603   PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
604   *reason = (PCRichardsonConvergedReason)0;
605 
606   if (!zeroguess) {
607     PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
608     /* pull in vector y->xtmp */
609     PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
610     PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
611 
612     /* copy vector entries into xred */
613     PetscCall(VecGetArrayRead(xtmp, &x_array));
614     if (yred) {
615       PetscScalar *LA_yred;
616       PetscCall(VecGetOwnershipRange(yred, &st, &ed));
617       PetscCall(VecGetArray(yred, &LA_yred));
618       for (i = 0; i < ed - st; i++) { LA_yred[i] = x_array[i]; }
619       PetscCall(VecRestoreArray(yred, &LA_yred));
620     }
621     PetscCall(VecRestoreArrayRead(xtmp, &x_array));
622   }
623 
624   if (PCTelescope_isActiveRank(sred)) {
625     PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
626     if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
627   }
628 
629   PetscCall(PCApply_Telescope(pc, x, y));
630 
631   if (PCTelescope_isActiveRank(sred)) { PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value)); }
632 
633   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
634   *outits = 1;
635   PetscFunctionReturn(0);
636 }
637 
638 static PetscErrorCode PCReset_Telescope(PC pc) {
639   PC_Telescope sred = (PC_Telescope)pc->data;
640 
641   PetscFunctionBegin;
642   PetscCall(ISDestroy(&sred->isin));
643   PetscCall(VecScatterDestroy(&sred->scatter));
644   PetscCall(VecDestroy(&sred->xred));
645   PetscCall(VecDestroy(&sred->yred));
646   PetscCall(VecDestroy(&sred->xtmp));
647   PetscCall(MatDestroy(&sred->Bred));
648   PetscCall(KSPReset(sred->ksp));
649   if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
650   PetscFunctionReturn(0);
651 }
652 
653 static PetscErrorCode PCDestroy_Telescope(PC pc) {
654   PC_Telescope sred = (PC_Telescope)pc->data;
655 
656   PetscFunctionBegin;
657   PetscCall(PCReset_Telescope(pc));
658   PetscCall(KSPDestroy(&sred->ksp));
659   PetscCall(PetscSubcommDestroy(&sred->psubcomm));
660   PetscCall(PetscFree(sred->dm_ctx));
661   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
662   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
663   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
664   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
665   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
666   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
667   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
668   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
669   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
670   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
671   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
672   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
673   PetscCall(PetscFree(pc->data));
674   PetscFunctionReturn(0);
675 }
676 
677 static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject) {
678   PC_Telescope     sred = (PC_Telescope)pc->data;
679   MPI_Comm         comm;
680   PetscMPIInt      size;
681   PetscBool        flg;
682   PetscSubcommType subcommtype;
683 
684   PetscFunctionBegin;
685   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
686   PetscCallMPI(MPI_Comm_size(comm, &size));
687   PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
688   PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
689   if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
690   PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
691   PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
692   PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
693   PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
694   PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
695   PetscOptionsHeadEnd();
696   PetscFunctionReturn(0);
697 }
698 
699 /* PC implementation-specific APIs */
700 
701 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp) {
702   PC_Telescope red = (PC_Telescope)pc->data;
703   PetscFunctionBegin;
704   if (ksp) *ksp = red->ksp;
705   PetscFunctionReturn(0);
706 }
707 
708 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype) {
709   PC_Telescope red = (PC_Telescope)pc->data;
710   PetscFunctionBegin;
711   if (subcommtype) *subcommtype = red->subcommtype;
712   PetscFunctionReturn(0);
713 }
714 
715 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype) {
716   PC_Telescope red = (PC_Telescope)pc->data;
717 
718   PetscFunctionBegin;
719   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
720   red->subcommtype = subcommtype;
721   PetscFunctionReturn(0);
722 }
723 
724 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact) {
725   PC_Telescope red = (PC_Telescope)pc->data;
726   PetscFunctionBegin;
727   if (fact) *fact = red->redfactor;
728   PetscFunctionReturn(0);
729 }
730 
731 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact) {
732   PC_Telescope red = (PC_Telescope)pc->data;
733   PetscMPIInt  size;
734 
735   PetscFunctionBegin;
736   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
737   PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
738   PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
739   red->redfactor = fact;
740   PetscFunctionReturn(0);
741 }
742 
743 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v) {
744   PC_Telescope red = (PC_Telescope)pc->data;
745   PetscFunctionBegin;
746   if (v) *v = red->ignore_dm;
747   PetscFunctionReturn(0);
748 }
749 
750 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v) {
751   PC_Telescope red = (PC_Telescope)pc->data;
752   PetscFunctionBegin;
753   red->ignore_dm = v;
754   PetscFunctionReturn(0);
755 }
756 
757 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v) {
758   PC_Telescope red = (PC_Telescope)pc->data;
759   PetscFunctionBegin;
760   if (v) *v = red->use_coarse_dm;
761   PetscFunctionReturn(0);
762 }
763 
764 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v) {
765   PC_Telescope red = (PC_Telescope)pc->data;
766   PetscFunctionBegin;
767   red->use_coarse_dm = v;
768   PetscFunctionReturn(0);
769 }
770 
771 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v) {
772   PC_Telescope red = (PC_Telescope)pc->data;
773   PetscFunctionBegin;
774   if (v) *v = red->ignore_kspcomputeoperators;
775   PetscFunctionReturn(0);
776 }
777 
778 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v) {
779   PC_Telescope red = (PC_Telescope)pc->data;
780   PetscFunctionBegin;
781   red->ignore_kspcomputeoperators = v;
782   PetscFunctionReturn(0);
783 }
784 
785 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm) {
786   PC_Telescope red = (PC_Telescope)pc->data;
787   PetscFunctionBegin;
788   *dm = private_PCTelescopeGetSubDM(red);
789   PetscFunctionReturn(0);
790 }
791 
792 /*@
793  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
794 
795  Not Collective
796 
797  Input Parameter:
798 .  pc - the preconditioner context
799 
800  Output Parameter:
801 .  subksp - the KSP defined on the smaller set of processes
802 
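 Note:
 subksp is created during PCSetUp() and is NULL on ranks which are not members of the sub-communicator.

 A usage sketch (illustrative; the tolerance values are arbitrary):
.vb
   KSP subksp;
   PCTelescopeGetKSP(pc, &subksp);
   if (subksp) { KSPSetTolerances(subksp, 1.e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT); }
.ve
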
803  Level: advanced
804 
805 @*/
806 PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp) {
807   PetscFunctionBegin;
808   PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
809   PetscFunctionReturn(0);
810 }
811 
812 /*@
813  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.
814 
815  Not Collective
816 
817  Input Parameter:
818 .  pc - the preconditioner context
819 
820  Output Parameter:
821 .  fact - the reduction factor
822 
823  Level: advanced
824 
825 @*/
826 PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact) {
827   PetscFunctionBegin;
828   PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
829   PetscFunctionReturn(0);
830 }
831 
832 /*@
833  PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.
834 
835  Logically Collective
836 
837  Input Parameters:
838 +  pc - the preconditioner context
839 -  fact - the reduction factor
842 
843  Level: advanced
844 
845 @*/
846 PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact) {
847   PetscFunctionBegin;
848   PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
849   PetscFunctionReturn(0);
850 }
851 
852 /*@
853  PCTelescopeGetIgnoreDM - Get the flag indicating whether any DM attached to the PC will be ignored.
854 
855  Not Collective
856 
857  Input Parameter:
858 .  pc - the preconditioner context
859 
860  Output Parameter:
861 .  v - the flag
862 
863  Level: advanced
864 
865 @*/
866 PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v) {
867   PetscFunctionBegin;
868   PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
869   PetscFunctionReturn(0);
870 }
871 
872 /*@
873  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
874 
875  Logically Collective
876 
877  Input Parameters:
878 +  pc - the preconditioner context
879 -  v - Use PETSC_TRUE to ignore any DM
882 
883  Level: advanced
884 
885 @*/
886 PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v) {
887   PetscFunctionBegin;
888   PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
889   PetscFunctionReturn(0);
890 }
891 
892 /*@
893  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to the DM associated with the PC will be used to define the sub-communicator.
894 
895  Not Collective
896 
897  Input Parameter:
898 .  pc - the preconditioner context
899 
900  Output Parameter:
901 .  v - the flag
902 
903  Level: advanced
904 
905 @*/
906 PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v) {
907   PetscFunctionBegin;
908   PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
909   PetscFunctionReturn(0);
910 }
911 
912 /*@
913  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC for a coarse DM and, if one is found, use its communicator to define the sub-communicator.
914 
915  Logically Collective
916 
917  Input Parameters:
918 +  pc - the preconditioner context
919 -  v - Use PETSC_FALSE to ignore any coarse DM
922 
923  Notes:
924  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
925  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
926  -pc_telescope_subcomm_type will no longer have any meaning.
927  It is required that the communicators associated with the parent (fine) DM and the coarse DM are of different sizes.
928  An error will occur if the size of the communicator associated with the coarse DM
929  is the same as that of the parent DM.
930  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
931  This will be checked at the time the preconditioner is setup and an error will occur if
932  the coarse DM does not define a sub-communicator of that used by the parent DM.
933 
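 A sketch of the expected setup (illustrative only; the construction of dm_fine, dm_coarse, ksp and pc is
 application specific, and dm_coarse must live on a sub-communicator of dm_fine's communicator):
.vb
   {
     DM dm_fine, dm_coarse;
     DMSetCoarseDM(dm_fine, dm_coarse);
     KSPSetDM(ksp, dm_fine);
     PCTelescopeSetUseCoarseDM(pc, PETSC_TRUE);
   }
.ve
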
934  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
935  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
936 
937  Support is currently only provided for the case when you are using KSPSetComputeOperators()
938 
939  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
940  In the user code, this is achieved via
941 .vb
942    {
943      DM dm_fine;
944      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
945    }
946 .ve
947  The signature of the user provided field scatter method is
948 .vb
949    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
950 .ve
951  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
952  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
953 
954  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
955  of state variables between the fine and coarse DMs.
956  In the context of a finite element discretization, an example state variable might be
957  values associated with quadrature points within each element.
958  A user provided state scatter method is composed via
959 .vb
960    {
961      DM dm_fine;
962      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
963    }
964 .ve
965  The signature of the user provided state scatter method is
966 .vb
967    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
968 .ve
969  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
970  The user is only required to support mode = SCATTER_FORWARD.
971  No assumption is made about the data type of the state variables.
972  These must be managed by the user and must be accessible from the DM.
973 
974  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
975  associated with the sub-KSP residing within PCTelescope.
976  In general, PCTelescope assumes that the context on the fine and coarse DM used with
977  KSPSetComputeOperators() should be "similar" in type or origin.
978  Specifically the following rules are used to infer what context on the sub-KSP should be.
979 
980  First the contexts from the KSP and the fine and coarse DMs are retrieved.
981  Note that the special case of a DMSHELL context is queried.
982 
983 .vb
984    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
985    DMGetApplicationContext(dm_fine,&dmfine_appctx);
986    DMShellGetContext(dm_fine,&dmfine_shellctx);
987 
988    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
989    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
990 .ve
991 
992  The following rules are then enforced:
993 
994  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
995  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
996 
997  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
998  check that dmcoarse_appctx is also non-NULL. If this is true, then:
999  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1000 
1001  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1002  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1003  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1004 
1005  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1006  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1007  In this case, an additional mechanism is provided via a composed function which will return
1008  the actual context to be used. To use this feature you must compose the "getter" function
1009  with the coarse DM, e.g.
1010 .vb
1011    {
1012      DM dm_coarse;
1013      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1014    }
1015 .ve
1016  The signature of the user provided method is
1017 .vb
1018    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1019 .ve
1020 
1021  Level: advanced
1022 
1023 @*/
1024 PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v) {
1025   PetscFunctionBegin;
1026   PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
1027   PetscFunctionReturn(0);
1028 }
1029 
1030 /*@
1031  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating whether the operator construction method set via KSPSetComputeOperators() will be ignored.
1032 
1033  Not Collective
1034 
1035  Input Parameter:
1036 .  pc - the preconditioner context
1037 
1038  Output Parameter:
1039 .  v - the flag
1040 
1041  Level: advanced
1042 
1043 @*/
1044 PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v) {
1045   PetscFunctionBegin;
1046   PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 /*@
1051  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
1052 
1053  Logically Collective
1054 
1055  Input Parameters:
1056 +  pc - the preconditioner context
1057 -  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators() on pc
1060 
1061  Level: advanced
1062 
1063 @*/
1064 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v) {
1065   PetscFunctionBegin;
1066   PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
1067   PetscFunctionReturn(0);
1068 }
1069 
1070 /*@
1071  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1072 
1073  Not Collective
1074 
1075  Input Parameter:
1076 .  pc - the preconditioner context
1077 
1078  Output Parameter:
1079 .  subdm - The re-partitioned DM
1080 
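 Note:
 subdm is NULL on ranks which are not members of the sub-communicator, and is also NULL unless the
 DMDA re-partitioning or coarse DM setup modes are in use.

 A usage sketch (illustrative):
.vb
   DM subdm;
   PCTelescopeGetDM(pc, &subdm);
   if (subdm) { DMView(subdm, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)subdm))); }
.ve
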
1081  Level: advanced
1082 
1083 @*/
1084 PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm) {
1085   PetscFunctionBegin;
1086   PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
1087   PetscFunctionReturn(0);
1088 }
1089 
1090 /*@
1091  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1092 
1093  Logically Collective
1094 
1095  Input Parameters:
1096 +  pc - the preconditioner context
1097 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1098 
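 Note:
 For example, with a parent communicator of size 4 and a reduction factor of 2, PETSC_SUBCOMM_INTERLACED
 places ranks {0,2} of the parent communicator in the active sub-communicator, whereas PETSC_SUBCOMM_CONTIGUOUS
 places ranks {0,1} there (see the vector mapping example in the PCTELESCOPE manual page).
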
1099  Level: advanced
1100 
1101 .seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
1102 @*/
1103 PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype) {
1104   PetscFunctionBegin;
1105   PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
1106   PetscFunctionReturn(0);
1107 }
1108 
1109 /*@
1110  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1111 
1112  Not Collective
1113 
1114  Input Parameter:
1115 .  pc - the preconditioner context
1116 
1117  Output Parameter:
1118 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1119 
1120  Level: advanced
1121 
1122 .seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
1123 @*/
1124 PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype) {
1125   PetscFunctionBegin;
1126   PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
1127   PetscFunctionReturn(0);
1128 }
1129 
1130 /* -------------------------------------------------------------------------------------*/
1131 /*MC
1132    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1133 
1134    Options Database:
1135 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by, e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1136 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1137 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1138 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1139 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1140 
1141    Level: advanced
1142 
1143    Notes:
1144    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1145    creates a child sub-communicator (c') containing fewer MPI ranks than the parent communicator c.
1146    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1147    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1148    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1149    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1150 
1151    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1152 
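   For example (an illustrative set of options; the reduction factor and inner solver are arbitrary choices), the
   following runs the outer GMRES on 64 ranks while the preconditioner is applied by the sub KSP on 64/4 = 16 ranks;
   the sub KSP is configured via the "telescope_" options prefix:
.vb
   mpiexec -n 64 ./app -ksp_type gmres -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_pc_type bjacobi
.ve
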
1153    There are three setup mechanisms for PCTelescope. The features supported by each type are described below.
1154    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1155    communicators c and c' respectively.
1156 
1157    [1] Default setup
1158    The sub-communicator c' is created via PetscSubcommCreate().
1159    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
1160    Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1161    No support is provided for KSPSetComputeOperators().
1162    Currently there is no support for the flag -pc_use_amat.
1163 
1164    [2] DM aware setup
1165    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1166    c' is created via PetscSubcommCreate().
1167    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1168    Currently only support for re-partitioning a DMDA is provided.
1169    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
1170    Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1171    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1172    This is fragile since the user must ensure that their user context is valid for use on c'.
1173    Currently there is no support for the flag -pc_use_amat.
1174 
1175    [3] Coarse DM setup
1176    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1177    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1178    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1179    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1180    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1181    available with using multi-grid on unstructured meshes.
1182    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1183    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
1184    Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1185    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1186    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1187    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1188    Currently there is no support for the flag -pc_use_amat.
1189    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1190    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1191 
1192    Developer Notes:
1193    During PCSetup, the B operator is scattered onto c'.
1194    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1195    Then, KSPSolve() is executed on the c' communicator.
1196 
1197    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1198    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1199 
1200    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1201    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1202    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c').
1203 
1204    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1205    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1206    and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
1207    for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1208    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1209 
1210    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1211 
1212    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1213    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1214    local (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1215 
1216    Limitations/improvements include the following.
1217    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1218    A unified mechanism to query for user contexts, as required by KSPSetComputeOperators() and MatNullSpaceSetFunction(), is currently lacking.
1219 
1220    The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1221    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
1222    VecPermute() does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1223    consistent manner.
1224 
1225    Mapping of vectors (default setup mode) is performed in the following way.
1226    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1227    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1228    We perform the scatter to the sub-communicator in the following way.
1229    [1] Given a vector x defined on communicator c
1230 
1231 .vb
1232    rank(c)  local values of x
1233    ------- ----------------------------------------
1234         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1235         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1236         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1237         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1238 .ve
1239 
1240    scatter into xtmp defined also on comm c, so that we have the following values
1241 
1242 .vb
1243    rank(c)  local values of xtmp
1244    ------- ----------------------------------------------------------------------------
1245         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1246         1   [ ]
1247         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1248         3   [ ]
1249 .ve
1250 
1251    The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.
1252 
1253    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1254    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1255 
1256 .vb
1257    rank(c')  local values of xred
1258    -------- ----------------------------------------------------------------------------
1259          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1260          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1261 .ve
1262 
1263   Contributed by Dave May
1264 
1265   Reference:
1266   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1267 
1268 .seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1269 M*/
1270 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc) {
1271   struct _PC_Telescope *sred;
1272 
1273   PetscFunctionBegin;
1274   PetscCall(PetscNewLog(pc, &sred));
1275   sred->psubcomm                   = NULL;
1276   sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
1277   sred->subcomm                    = MPI_COMM_NULL;
1278   sred->redfactor                  = 1;
1279   sred->ignore_dm                  = PETSC_FALSE;
1280   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1281   sred->use_coarse_dm              = PETSC_FALSE;
1282   pc->data                         = (void *)sred;
1283 
1284   pc->ops->apply           = PCApply_Telescope;
1285   pc->ops->applytranspose  = NULL;
1286   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1287   pc->ops->setup           = PCSetUp_Telescope;
1288   pc->ops->destroy         = PCDestroy_Telescope;
1289   pc->ops->reset           = PCReset_Telescope;
1290   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1291   pc->ops->view            = PCView_Telescope;
1292 
1293   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1294   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1295   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1296   sred->pctelescope_reset_type              = NULL;
1297 
1298   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
1299   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
1300   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
1301   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
1310   PetscFunctionReturn(0);
1311 }
1312