xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 40cbb1a031ea8f2be4fe2b92dc842b003ad37be3)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
8 static PetscBool  cited = PETSC_FALSE;
9 static const char citation[] =
10 "@inproceedings{MaySananRuppKnepleySmith2016,\n"
11 "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
12 "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
13 "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
14 "  series    = {PASC '16},\n"
15 "  isbn      = {978-1-4503-4126-4},\n"
16 "  location  = {Lausanne, Switzerland},\n"
17 "  pages     = {5:1--5:12},\n"
18 "  articleno = {5},\n"
19 "  numpages  = {12},\n"
20 "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
21 "  doi       = {10.1145/2929908.2929913},\n"
22 "  acmid     = {2929913},\n"
23 "  publisher = {ACM},\n"
24 "  address   = {New York, NY, USA},\n"
25 "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
26 "  year      = {2016}\n"
27 "}\n";
28 
29 /*
30  default setup mode
31 
32  [1a] scatter to (FORWARD)
33  x(comm) -> xtmp(comm)
34  [1b] local copy (to) ranks with color = 0
35  xred(subcomm) <- xtmp
36 
37  [2] solve on sub KSP to obtain yred(subcomm)
38 
39  [3a] local copy (from) ranks with color = 0
40  yred(subcomm) --> xtmp
41  [3b] scatter from (REVERSE)
42  xtmp(comm) -> y(comm)
43 */
44 
45 /*
46   Collective on comm_f
47   Notes:
48    * Using comm_f = MPI_COMM_NULL will result in an error.
49    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are MPI_COMM_NULL, the subcomm is not valid.
50    * If any non-NULL comm_c cannot map any of its ranks into comm_f, the subcomm is not valid. For example, a comm_c containing ranks {0,2} of a size-4 comm_f is valid, whereas a comm_c built from processes disjoint from comm_f is not.
51 */
52 PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
53 {
54   PetscInt       valid = 1;
55   MPI_Group      group_f,group_c;
56   PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
57   PetscMPIInt    *ranks_f,*ranks_c;
58 
59   PetscFunctionBegin;
60   PetscCheck(comm_f != MPI_COMM_NULL,PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");
61 
62   PetscCallMPI(MPI_Comm_group(comm_f,&group_f));
63   if (comm_c != MPI_COMM_NULL) {
64     PetscCallMPI(MPI_Comm_group(comm_c,&group_c));
65   }
66 
67   PetscCallMPI(MPI_Comm_size(comm_f,&size_f));
68   if (comm_c != MPI_COMM_NULL) {
69     PetscCallMPI(MPI_Comm_size(comm_c,&size_c));
70   }
71 
72   /* check not all comm_c's are NULL */
73   size_c_sum = size_c;
74   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f));
75   if (size_c_sum == 0) valid = 0;
76 
77   /* check we can map at least 1 rank in comm_c to comm_f */
78   PetscCall(PetscMalloc1(size_f,&ranks_f));
79   PetscCall(PetscMalloc1(size_c,&ranks_c));
80   for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
81   for (k=0; k<size_c; k++) ranks_c[k] = k;
82 
83   /*
84    MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
85    I do not want the code to terminate immediately if this occurs, rather I want to throw
86    the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
87    that comm_c is not a valid sub-communicator.
88    Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
89   */
90   count = 0;
91   if (comm_c != MPI_COMM_NULL) {
92     (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
93     for (k=0; k<size_f; k++) {
94       if (ranks_f[k] == MPI_UNDEFINED) {
95         count++;
96       }
97     }
98   }
99   if (count == size_f) valid = 0;
100 
101   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f));
102   if (valid == 1) *isvalid = PETSC_TRUE;
103   else *isvalid = PETSC_FALSE;
104 
105   PetscCall(PetscFree(ranks_f));
106   PetscCall(PetscFree(ranks_c));
107   PetscCallMPI(MPI_Group_free(&group_f));
108   if (comm_c != MPI_COMM_NULL) {
109     PetscCallMPI(MPI_Group_free(&group_c));
110   }
111   PetscFunctionReturn(0);
112 }
113 
114 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
115 {
116   DM subdm = NULL;
117 
118   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
119   else {
120     switch (sred->sr_type) {
121     case TELESCOPE_DEFAULT: subdm = NULL;
122       break;
123     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
124       break;
125     case TELESCOPE_DMPLEX:  subdm = NULL;
126       break;
127     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
128       break;
129     }
130   }
131   return(subdm);
132 }
133 
134 PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
135 {
136   PetscInt       m,M,bs,st,ed;
137   Vec            x,xred,yred,xtmp;
138   Mat            B;
139   MPI_Comm       comm,subcomm;
140   VecScatter     scatter;
141   IS             isin;
142   VecType        vectype;
143 
144   PetscFunctionBegin;
145   PetscCall(PetscInfo(pc,"PCTelescope: setup (default)\n"));
146   comm = PetscSubcommParent(sred->psubcomm);
147   subcomm = PetscSubcommChild(sred->psubcomm);
148 
149   PetscCall(PCGetOperators(pc,NULL,&B));
150   PetscCall(MatGetSize(B,&M,NULL));
151   PetscCall(MatGetBlockSize(B,&bs));
152   PetscCall(MatCreateVecs(B,&x,NULL));
153   PetscCall(MatGetVecType(B,&vectype));
154 
155   xred = NULL;
156   m    = 0;
157   if (PCTelescope_isActiveRank(sred)) {
158     PetscCall(VecCreate(subcomm,&xred));
159     PetscCall(VecSetSizes(xred,PETSC_DECIDE,M));
160     PetscCall(VecSetBlockSize(xred,bs));
161     PetscCall(VecSetType(xred,vectype)); /* Use the preconditioner matrix's vectype by default */
162     PetscCall(VecSetFromOptions(xred));
163     PetscCall(VecGetLocalSize(xred,&m));
164   }
165 
166   yred = NULL;
167   if (PCTelescope_isActiveRank(sred)) {
168     PetscCall(VecDuplicate(xred,&yred));
169   }
170 
171   PetscCall(VecCreate(comm,&xtmp));
172   PetscCall(VecSetSizes(xtmp,m,PETSC_DECIDE));
173   PetscCall(VecSetBlockSize(xtmp,bs));
174   PetscCall(VecSetType(xtmp,vectype));
175 
176   if (PCTelescope_isActiveRank(sred)) {
177     PetscCall(VecGetOwnershipRange(xred,&st,&ed));
178     PetscCall(ISCreateStride(comm,(ed-st),st,1,&isin));
179   } else {
180     PetscCall(VecGetOwnershipRange(x,&st,&ed));
181     PetscCall(ISCreateStride(comm,0,st,1,&isin));
182   }
183   PetscCall(ISSetBlockSize(isin,bs));
184 
185   PetscCall(VecScatterCreate(x,isin,xtmp,NULL,&scatter));
186 
187   sred->isin    = isin;
188   sred->scatter = scatter;
189   sred->xred    = xred;
190   sred->yred    = yred;
191   sred->xtmp    = xtmp;
192   PetscCall(VecDestroy(&x));
193   PetscFunctionReturn(0);
194 }
195 
196 PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
197 {
198   MPI_Comm       comm,subcomm;
199   Mat            Bred,B;
200   PetscInt       nr,nc,bs;
201   IS             isrow,iscol;
202   Mat            Blocal,*_Blocal;
203 
204   PetscFunctionBegin;
205   PetscCall(PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n"));
206   PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
207   subcomm = PetscSubcommChild(sred->psubcomm);
208   PetscCall(PCGetOperators(pc,NULL,&B));
209   PetscCall(MatGetSize(B,&nr,&nc));
210   isrow = sred->isin;
211   PetscCall(ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol));
212   PetscCall(ISSetIdentity(iscol));
213   PetscCall(MatGetBlockSizes(B,NULL,&bs));
214   PetscCall(ISSetBlockSize(iscol,bs));
215   PetscCall(MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
216   PetscCall(MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal));
217   Blocal = *_Blocal;
218   PetscCall(PetscFree(_Blocal));
219   Bred = NULL;
220   if (PCTelescope_isActiveRank(sred)) {
221     PetscInt mm;
222 
223     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
224 
225     PetscCall(MatGetSize(Blocal,&mm,NULL));
226     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred));
227   }
228   *A = Bred;
229   PetscCall(ISDestroy(&iscol));
230   PetscCall(MatDestroy(&Blocal));
231   PetscFunctionReturn(0);
232 }
233 
234 static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
235 {
236   PetscBool      has_const;
237   const Vec      *vecs;
238   Vec            *sub_vecs = NULL;
239   PetscInt       i,k,n = 0;
240   MPI_Comm       subcomm;
241 
242   PetscFunctionBegin;
243   subcomm = PetscSubcommChild(sred->psubcomm);
244   PetscCall(MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs));
245 
246   if (PCTelescope_isActiveRank(sred)) {
247     if (n) {
248       PetscCall(VecDuplicateVecs(sred->xred,n,&sub_vecs));
249     }
250   }
251 
252   /* copy entries */
253   for (k=0; k<n; k++) {
254     const PetscScalar *x_array;
255     PetscScalar       *LA_sub_vec;
256     PetscInt          st,ed;
257 
258     /* pull in vector x->xtmp */
259     PetscCall(VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD));
260     PetscCall(VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD));
261     if (sub_vecs) {
262       /* copy vector entries into xred */
263       PetscCall(VecGetArrayRead(sred->xtmp,&x_array));
264       if (sub_vecs[k]) {
265         PetscCall(VecGetOwnershipRange(sub_vecs[k],&st,&ed));
266         PetscCall(VecGetArray(sub_vecs[k],&LA_sub_vec));
267         for (i=0; i<ed-st; i++) {
268           LA_sub_vec[i] = x_array[i];
269         }
270         PetscCall(VecRestoreArray(sub_vecs[k],&LA_sub_vec));
271       }
272       PetscCall(VecRestoreArrayRead(sred->xtmp,&x_array));
273     }
274   }
275 
276   if (PCTelescope_isActiveRank(sred)) {
277     /* create new (near) nullspace for redundant object */
278     PetscCall(MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace));
279     PetscCall(VecDestroyVecs(n,&sub_vecs));
280     PetscCheck(!nullspace->remove,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
281     PetscCheck(!nullspace->rmctx,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
282   }
283   PetscFunctionReturn(0);
284 }
285 
286 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
287 {
288   Mat            B;
289 
290   PetscFunctionBegin;
291   PetscCall(PCGetOperators(pc,NULL,&B));
292   /* Propagate the nullspace if it exists */
293   {
294     MatNullSpace nullspace,sub_nullspace;
295     PetscCall(MatGetNullSpace(B,&nullspace));
296     if (nullspace) {
297       PetscCall(PetscInfo(pc,"PCTelescope: generating nullspace (default)\n"));
298       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace));
299       if (PCTelescope_isActiveRank(sred)) {
300         PetscCall(MatSetNullSpace(sub_mat,sub_nullspace));
301         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
302       }
303     }
304   }
305   /* Propagate the near nullspace if it exists */
306   {
307     MatNullSpace nearnullspace,sub_nearnullspace;
308     PetscCall(MatGetNearNullSpace(B,&nearnullspace));
309     if (nearnullspace) {
310       PetscCall(PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n"));
311       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace));
312       if (PCTelescope_isActiveRank(sred)) {
313         PetscCall(MatSetNearNullSpace(sub_mat,sub_nearnullspace));
314         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
315       }
316     }
317   }
318   PetscFunctionReturn(0);
319 }
320 
321 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
322 {
323   PC_Telescope   sred = (PC_Telescope)pc->data;
324   PetscBool      iascii,isstring;
325   PetscViewer    subviewer;
326 
327   PetscFunctionBegin;
328   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
329   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
330   if (iascii) {
331     {
332       MPI_Comm    comm,subcomm;
333       PetscMPIInt comm_size,subcomm_size;
334       DM          dm = NULL,subdm = NULL;
335 
336       PetscCall(PCGetDM(pc,&dm));
337       subdm = private_PCTelescopeGetSubDM(sred);
338 
339       if (sred->psubcomm) {
340         comm = PetscSubcommParent(sred->psubcomm);
341         subcomm = PetscSubcommChild(sred->psubcomm);
342         PetscCallMPI(MPI_Comm_size(comm,&comm_size));
343         PetscCallMPI(MPI_Comm_size(subcomm,&subcomm_size));
344 
345         PetscCall(PetscViewerASCIIPushTab(viewer));
346         PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n",sred->redfactor));
347         PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size));
348         switch (sred->subcommtype) {
349         case PETSC_SUBCOMM_INTERLACED :
350           PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = %s\n",PetscSubcommTypes[sred->subcommtype]));
351           break;
352         case PETSC_SUBCOMM_CONTIGUOUS :
353           PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm type = %s\n",PetscSubcommTypes[sred->subcommtype]));
354           break;
355         default :
356           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
357         }
358         PetscCall(PetscViewerASCIIPopTab(viewer));
359       } else {
360         PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
361         subcomm = sred->subcomm;
362         if (!PCTelescope_isActiveRank(sred)) {
363           subcomm = PETSC_COMM_SELF;
364         }
365 
366         PetscCall(PetscViewerASCIIPushTab(viewer));
367         PetscCall(PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n"));
368         PetscCall(PetscViewerASCIIPopTab(viewer));
369       }
370 
371       PetscCall(PetscViewerGetSubViewer(viewer,subcomm,&subviewer));
372       if (PCTelescope_isActiveRank(sred)) {
373         PetscCall(PetscViewerASCIIPushTab(subviewer));
374 
375         if (dm && sred->ignore_dm) {
376           PetscCall(PetscViewerASCIIPrintf(subviewer,"ignoring DM\n"));
377         }
378         if (sred->ignore_kspcomputeoperators) {
379           PetscCall(PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n"));
380         }
381         switch (sred->sr_type) {
382         case TELESCOPE_DEFAULT:
383           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: default\n"));
384           break;
385         case TELESCOPE_DMDA:
386           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n"));
387           PetscCall(DMView_DA_Short(subdm,subviewer));
388           break;
389         case TELESCOPE_DMPLEX:
390           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n"));
391           break;
392         case TELESCOPE_COARSEDM:
393           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n"));
394           break;
395         }
396 
397         if (dm) {
398           PetscObject obj = (PetscObject)dm;
399           PetscCall(PetscViewerASCIIPrintf(subviewer,"Parent DM object:"));
400           PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE));
401           if (obj->type_name) { PetscCall(PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name)); }
402           if (obj->name) { PetscCall(PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name)); }
403           if (obj->prefix) { PetscCall(PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix)); }
404           PetscCall(PetscViewerASCIIPrintf(subviewer,"\n"));
405           PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE));
406         } else {
407           PetscCall(PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n"));
408         }
409         if (subdm) {
410           PetscObject obj = (PetscObject)subdm;
411           PetscCall(PetscViewerASCIIPrintf(subviewer,"Sub DM object:"));
412           PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE));
413           if (obj->type_name) { PetscCall(PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name)); }
414           if (obj->name) { PetscCall(PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name)); }
415           if (obj->prefix) { PetscCall(PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix)); }
416           PetscCall(PetscViewerASCIIPrintf(subviewer,"\n"));
417           PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE));
418         } else {
419           PetscCall(PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n"));
420         }
421 
422         PetscCall(KSPView(sred->ksp,subviewer));
423         PetscCall(PetscViewerASCIIPopTab(subviewer));
424       }
425       PetscCall(PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer));
426     }
427   }
428   PetscFunctionReturn(0);
429 }
430 
431 static PetscErrorCode PCSetUp_Telescope(PC pc)
432 {
433   PC_Telescope    sred = (PC_Telescope)pc->data;
434   MPI_Comm        comm,subcomm=MPI_COMM_NULL;
435   PCTelescopeType sr_type;
436 
437   PetscFunctionBegin;
438   PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
439 
440   /* Determine type of setup/update */
441   if (!pc->setupcalled) {
442     PetscBool has_dm,same;
443     DM        dm;
444 
445     sr_type = TELESCOPE_DEFAULT;
446     has_dm = PETSC_FALSE;
447     PetscCall(PCGetDM(pc,&dm));
448     if (dm) { has_dm = PETSC_TRUE; }
449     if (has_dm) {
450       /* check for dmda */
451       PetscCall(PetscObjectTypeCompare((PetscObject)dm,DMDA,&same));
452       if (same) {
453         PetscCall(PetscInfo(pc,"PCTelescope: found DMDA\n"));
454         sr_type = TELESCOPE_DMDA;
455       }
456       /* check for dmplex */
457       PetscCall(PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same));
458       if (same) {
459         PetscCall(PetscInfo(pc,"PCTelescope: found DMPLEX\n"));
460         sr_type = TELESCOPE_DMPLEX;
461       }
462 
463       if (sred->use_coarse_dm) {
464         PetscCall(PetscInfo(pc,"PCTelescope: using coarse DM\n"));
465         sr_type = TELESCOPE_COARSEDM;
466       }
467 
468       if (sred->ignore_dm) {
469         PetscCall(PetscInfo(pc,"PCTelescope: ignoring DM\n"));
470         sr_type = TELESCOPE_DEFAULT;
471       }
472     }
473     sred->sr_type = sr_type;
474   } else {
475     sr_type = sred->sr_type;
476   }
477 
478   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
479   switch (sr_type) {
480   case TELESCOPE_DEFAULT:
481     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
482     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
483     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
484     sred->pctelescope_reset_type              = NULL;
485     break;
486   case TELESCOPE_DMDA:
487     pc->ops->apply                            = PCApply_Telescope_dmda;
488     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
489     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
490     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
491     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
492     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
493     break;
494   case TELESCOPE_DMPLEX:
495     SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
496   case TELESCOPE_COARSEDM:
497     pc->ops->apply                            = PCApply_Telescope_CoarseDM;
498     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
499     sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
500     sred->pctelescope_matcreate_type          = NULL;
501     sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
502     sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
503     break;
504   default:
505     SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
506   }
507 
508   /* subcomm definition */
509   if (!pc->setupcalled) {
510     if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
511       if (!sred->psubcomm) {
512         PetscCall(PetscSubcommCreate(comm,&sred->psubcomm));
513         PetscCall(PetscSubcommSetNumber(sred->psubcomm,sred->redfactor));
514         PetscCall(PetscSubcommSetType(sred->psubcomm,sred->subcommtype));
515         PetscCall(PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm)));
516         sred->subcomm = PetscSubcommChild(sred->psubcomm);
517       }
518     } else { /* query PC for DM, check communicators */
519       DM          dm,dm_coarse_partition = NULL;
520       MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
521       PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
522       PetscBool   isvalidsubcomm;
523 
524       PetscCall(PCGetDM(pc,&dm));
525       comm_fine = PetscObjectComm((PetscObject)dm);
526       PetscCall(DMGetCoarseDM(dm,&dm_coarse_partition));
527       if (dm_coarse_partition) { cnt = 1; }
528       PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine));
529       PetscCheck(cnt != 0,comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");
530 
531       PetscCallMPI(MPI_Comm_size(comm_fine,&csize_fine));
532       if (dm_coarse_partition) {
533         comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
534         PetscCallMPI(MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition));
535       }
536 
537       cs[0] = csize_fine;
538       cs[1] = csize_coarse_partition;
539       PetscCallMPI(MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine));
540       PetscCheck(csg[0] != csg[1],comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");
541 
542       PetscCall(PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm));
543       PetscCheck(isvalidsubcomm,comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
544       sred->subcomm = comm_coarse_partition;
545     }
546   }
547   subcomm = sred->subcomm;
548 
549   /* internal KSP */
550   if (!pc->setupcalled) {
551     const char *prefix;
552 
553     if (PCTelescope_isActiveRank(sred)) {
554       PetscCall(KSPCreate(subcomm,&sred->ksp));
555       PetscCall(KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure));
556       PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1));
557       PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp));
558       PetscCall(KSPSetType(sred->ksp,KSPPREONLY));
559       PetscCall(PCGetOptionsPrefix(pc,&prefix));
560       PetscCall(KSPSetOptionsPrefix(sred->ksp,prefix));
561       PetscCall(KSPAppendOptionsPrefix(sred->ksp,"telescope_"));
562     }
563   }
564 
565   /* setup */
566   if (!pc->setupcalled && sred->pctelescope_setup_type) {
567     PetscCall(sred->pctelescope_setup_type(pc,sred));
568   }
569   /* update */
570   if (!pc->setupcalled) {
571     if (sred->pctelescope_matcreate_type) {
572       PetscCall(sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred));
573     }
574     if (sred->pctelescope_matnullspacecreate_type) {
575       PetscCall(sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred));
576     }
577   } else {
578     if (sred->pctelescope_matcreate_type) {
579       PetscCall(sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred));
580     }
581   }
582 
583   /* common - no construction */
584   if (PCTelescope_isActiveRank(sred)) {
585     PetscCall(KSPSetOperators(sred->ksp,sred->Bred,sred->Bred));
586     if (pc->setfromoptionscalled && !pc->setupcalled) {
587       PetscCall(KSPSetFromOptions(sred->ksp));
588     }
589   }
590   PetscFunctionReturn(0);
591 }
592 
593 static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
594 {
595   PC_Telescope      sred = (PC_Telescope)pc->data;
596   Vec               xtmp,xred,yred;
597   PetscInt          i,st,ed;
598   VecScatter        scatter;
599   PetscScalar       *array;
600   const PetscScalar *x_array;
601 
602   PetscFunctionBegin;
603   PetscCall(PetscCitationsRegister(citation,&cited));
604 
605   xtmp    = sred->xtmp;
606   scatter = sred->scatter;
607   xred    = sred->xred;
608   yred    = sred->yred;
609 
610   /* pull in vector x->xtmp */
611   PetscCall(VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD));
612   PetscCall(VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD));
613 
614   /* copy vector entries into xred */
615   PetscCall(VecGetArrayRead(xtmp,&x_array));
616   if (xred) {
617     PetscScalar *LA_xred;
618     PetscCall(VecGetOwnershipRange(xred,&st,&ed));
619     PetscCall(VecGetArray(xred,&LA_xred));
620     for (i=0; i<ed-st; i++) {
621       LA_xred[i] = x_array[i];
622     }
623     PetscCall(VecRestoreArray(xred,&LA_xred));
624   }
625   PetscCall(VecRestoreArrayRead(xtmp,&x_array));
626   /* solve */
627   if (PCTelescope_isActiveRank(sred)) {
628     PetscCall(KSPSolve(sred->ksp,xred,yred));
629     PetscCall(KSPCheckSolve(sred->ksp,pc,yred));
630   }
631   /* return vector */
632   PetscCall(VecGetArray(xtmp,&array));
633   if (yred) {
634     const PetscScalar *LA_yred;
635     PetscCall(VecGetOwnershipRange(yred,&st,&ed));
636     PetscCall(VecGetArrayRead(yred,&LA_yred));
637     for (i=0; i<ed-st; i++) {
638       array[i] = LA_yred[i];
639     }
640     PetscCall(VecRestoreArrayRead(yred,&LA_yred));
641   }
642   PetscCall(VecRestoreArray(xtmp,&array));
643   PetscCall(VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE));
644   PetscCall(VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE));
645   PetscFunctionReturn(0);
646 }
647 
648 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
649 {
650   PC_Telescope      sred = (PC_Telescope)pc->data;
651   Vec               xtmp,yred;
652   PetscInt          i,st,ed;
653   VecScatter        scatter;
654   const PetscScalar *x_array;
655   PetscBool         default_init_guess_value;
656 
657   PetscFunctionBegin;
658   xtmp    = sred->xtmp;
659   scatter = sred->scatter;
660   yred    = sred->yred;
661 
662   PetscCheck(its <= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
663   *reason = (PCRichardsonConvergedReason)0;
664 
665   if (!zeroguess) {
666     PetscCall(PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n"));
667     /* pull in vector y->xtmp */
668     PetscCall(VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD));
669     PetscCall(VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD));
670 
671     /* copy vector entries into xred */
672     PetscCall(VecGetArrayRead(xtmp,&x_array));
673     if (yred) {
674       PetscScalar *LA_yred;
675       PetscCall(VecGetOwnershipRange(yred,&st,&ed));
676       PetscCall(VecGetArray(yred,&LA_yred));
677       for (i=0; i<ed-st; i++) {
678         LA_yred[i] = x_array[i];
679       }
680       PetscCall(VecRestoreArray(yred,&LA_yred));
681     }
682     PetscCall(VecRestoreArrayRead(xtmp,&x_array));
683   }
684 
685   if (PCTelescope_isActiveRank(sred)) {
686     PetscCall(KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value));
687     if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE));
688   }
689 
690   PetscCall(PCApply_Telescope(pc,x,y));
691 
692   if (PCTelescope_isActiveRank(sred)) {
693     PetscCall(KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value));
694   }
695 
696   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
697   *outits = 1;
698   PetscFunctionReturn(0);
699 }
700 
701 static PetscErrorCode PCReset_Telescope(PC pc)
702 {
703   PC_Telescope   sred = (PC_Telescope)pc->data;
704 
705   PetscFunctionBegin;
706   PetscCall(ISDestroy(&sred->isin));
707   PetscCall(VecScatterDestroy(&sred->scatter));
708   PetscCall(VecDestroy(&sred->xred));
709   PetscCall(VecDestroy(&sred->yred));
710   PetscCall(VecDestroy(&sred->xtmp));
711   PetscCall(MatDestroy(&sred->Bred));
712   PetscCall(KSPReset(sred->ksp));
713   if (sred->pctelescope_reset_type) {
714     PetscCall(sred->pctelescope_reset_type(pc));
715   }
716   PetscFunctionReturn(0);
717 }
718 
719 static PetscErrorCode PCDestroy_Telescope(PC pc)
720 {
721   PC_Telescope   sred = (PC_Telescope)pc->data;
722 
723   PetscFunctionBegin;
724   PetscCall(PCReset_Telescope(pc));
725   PetscCall(KSPDestroy(&sred->ksp));
726   PetscCall(PetscSubcommDestroy(&sred->psubcomm));
727   PetscCall(PetscFree(sred->dm_ctx));
728   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",NULL));
729   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",NULL));
730   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",NULL));
731   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",NULL));
732   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",NULL));
733   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",NULL));
734   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",NULL));
735   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",NULL));
736   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",NULL));
737   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",NULL));
738   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",NULL));
739   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",NULL));
740   PetscCall(PetscFree(pc->data));
741   PetscFunctionReturn(0);
742 }
743 
744 static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
745 {
746   PC_Telescope     sred = (PC_Telescope)pc->data;
747   MPI_Comm         comm;
748   PetscMPIInt      size;
749   PetscBool        flg;
750   PetscSubcommType subcommtype;
751 
752   PetscFunctionBegin;
753   PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
754   PetscCallMPI(MPI_Comm_size(comm,&size));
755   PetscOptionsHeadBegin(PetscOptionsObject,"Telescope options");
756   PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg));
757   if (flg) {
758     PetscCall(PCTelescopeSetSubcommType(pc,subcommtype));
759   }
760   PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL));
761   PetscCheck(sred->redfactor <= size,comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
762   PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL));
763   PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL));
764   PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL));
765   PetscOptionsHeadEnd();
766   PetscFunctionReturn(0);
767 }
768 
769 /* PC implementation-specific APIs */
770 
771 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
772 {
773   PC_Telescope red = (PC_Telescope)pc->data;
774   PetscFunctionBegin;
775   if (ksp) *ksp = red->ksp;
776   PetscFunctionReturn(0);
777 }
778 
779 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
780 {
781   PC_Telescope red = (PC_Telescope)pc->data;
782   PetscFunctionBegin;
783   if (subcommtype) *subcommtype = red->subcommtype;
784   PetscFunctionReturn(0);
785 }
786 
787 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
788 {
789   PC_Telescope     red = (PC_Telescope)pc->data;
790 
791   PetscFunctionBegin;
792   PetscCheck(!pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
793   red->subcommtype = subcommtype;
794   PetscFunctionReturn(0);
795 }
796 
797 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
798 {
799   PC_Telescope red = (PC_Telescope)pc->data;
800   PetscFunctionBegin;
801   if (fact) *fact = red->redfactor;
802   PetscFunctionReturn(0);
803 }
804 
805 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
806 {
807   PC_Telescope     red = (PC_Telescope)pc->data;
808   PetscMPIInt      size;
809 
810   PetscFunctionBegin;
811   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
812   PetscCheck(fact > 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %" PetscInt_FMT " must be positive",fact);
813   PetscCheck(fact <= size,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size",fact);
814   red->redfactor = fact;
815   PetscFunctionReturn(0);
816 }
817 
818 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
819 {
820   PC_Telescope red = (PC_Telescope)pc->data;
821   PetscFunctionBegin;
822   if (v) *v = red->ignore_dm;
823   PetscFunctionReturn(0);
824 }
825 
826 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
827 {
828   PC_Telescope red = (PC_Telescope)pc->data;
829   PetscFunctionBegin;
830   red->ignore_dm = v;
831   PetscFunctionReturn(0);
832 }
833 
834 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
835 {
836   PC_Telescope red = (PC_Telescope)pc->data;
837   PetscFunctionBegin;
838   if (v) *v = red->use_coarse_dm;
839   PetscFunctionReturn(0);
840 }
841 
842 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
843 {
844   PC_Telescope red = (PC_Telescope)pc->data;
845   PetscFunctionBegin;
846   red->use_coarse_dm = v;
847   PetscFunctionReturn(0);
848 }
849 
850 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
851 {
852   PC_Telescope red = (PC_Telescope)pc->data;
853   PetscFunctionBegin;
854   if (v) *v = red->ignore_kspcomputeoperators;
855   PetscFunctionReturn(0);
856 }
857 
858 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
859 {
860   PC_Telescope red = (PC_Telescope)pc->data;
861   PetscFunctionBegin;
862   red->ignore_kspcomputeoperators = v;
863   PetscFunctionReturn(0);
864 }
865 
866 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
867 {
868   PC_Telescope red = (PC_Telescope)pc->data;
869   PetscFunctionBegin;
870   *dm = private_PCTelescopeGetSubDM(red);
871   PetscFunctionReturn(0);
872 }
873 
874 /*@
875  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
876 
877  Not Collective
878 
879  Input Parameter:
880 .  pc - the preconditioner context
881 
882  Output Parameter:
883 .  subksp - the KSP defined on the smaller set of processes
884 
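 Notes:
 A minimal usage sketch. The sub-KSP is created only on ranks belonging to the sub-communicator, so the result should be checked against NULL before use:
.vb
   KSP subksp;
   PCTelescopeGetKSP(pc,&subksp);
   if (subksp) { /* configure the sub-KSP; subksp is NULL on the idle ranks */ }
.ve
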
885  Level: advanced
886 
887 @*/
888 PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
889 {
890   PetscFunctionBegin;
891   PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));
892   PetscFunctionReturn(0);
893 }
894 
895 /*@
896  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.
897 
898  Not Collective
899 
900  Input Parameter:
901 .  pc - the preconditioner context
902 
903  Output Parameter:
904 .  fact - the reduction factor
905 
906  Level: advanced
907 
908 @*/
909 PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
910 {
911   PetscFunctionBegin;
912   PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));
913   PetscFunctionReturn(0);
914 }
915 
916 /*@
917  PCTelescopeSetReductionFactor - Sets the factor by which the number of processes will be reduced.
918 
919  Logically Collective
920 
921  Input Parameters:
922 +  pc - the preconditioner context
923 -  fact - the reduction factor
926 
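 Notes:
 For example, to run the telescoped solve on one quarter of the MPI ranks (equivalent to the command line option -pc_telescope_reduction_factor 4):
.vb
   PCTelescopeSetReductionFactor(pc,4);
.ve
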
927  Level: advanced
928 
929 @*/
930 PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
931 {
932   PetscFunctionBegin;
933   PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));
934   PetscFunctionReturn(0);
935 }
936 
937 /*@
938  PCTelescopeGetIgnoreDM - Get the flag indicating whether any DM attached to the PC will be ignored.
939 
940  Not Collective
941 
942  Input Parameter:
943 .  pc - the preconditioner context
944 
945  Output Parameter:
946 .  v - the flag
947 
948  Level: advanced
949 
950 @*/
951 PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
952 {
953   PetscFunctionBegin;
954   PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));
955   PetscFunctionReturn(0);
956 }
957 
958 /*@
959  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
960 
961  Logically Collective
962 
963  Input Parameters:
964 +  pc - the preconditioner context
965 -  v - Use PETSC_TRUE to ignore any DM attached to the PC
968 
969  Level: advanced
970 
971 @*/
972 PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
973 {
974   PetscFunctionBegin;
975   PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));
976   PetscFunctionReturn(0);
977 }
978 
979 /*@
980  PCTelescopeGetUseCoarseDM - Get the flag indicating whether the coarse DM attached to the DM associated with the PC will be used to define the sub-communicator.
981 
982  Not Collective
983 
984  Input Parameter:
985 .  pc - the preconditioner context
986 
987  Output Parameter:
988 .  v - the flag
989 
990  Level: advanced
991 
992 @*/
993 PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
994 {
995   PetscFunctionBegin;
996   PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));
997   PetscFunctionReturn(0);
998 }
999 
1000 /*@
1001  PCTelescopeSetUseCoarseDM - Set a flag indicating that the DM attached to the PC should be queried for a coarse DM, which will define the sub-communicator
1002 
1003  Logically Collective
1004 
1005  Input Parameters:
1006 +  pc - the preconditioner context
1007 -  v - Use PETSC_FALSE to ignore any coarse DM
1010 
1011  Notes:
1012  When a coarse DM is used, the communicator used to create the sub-KSP within PCTelescope
1013  will be that of the coarse DM. Hence the options -pc_telescope_reduction_factor and
1014  -pc_telescope_subcomm_type no longer have any meaning.
1015  It is required that the communicators associated with the parent (fine) DM and the coarse DM are of different sizes.
1016  An error will occur if the size of the communicator associated with the coarse DM
1017  is the same as that of the parent DM.
1018  Furthermore, it is required that the communicator of the coarse DM be a sub-communicator of the parent.
1019  This will be checked when the preconditioner is set up, and an error will occur if
1020  the coarse DM does not define a sub-communicator of that used by the parent DM.
1021 
1022  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1023  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1024 
1025  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1026 
1027  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1028  In the user code, this is achieved via
1029 .vb
1030    {
1031      DM dm_fine;
1032      PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1033    }
1034 .ve
1035  The signature of the user provided field scatter method is
1036 .vb
1037    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1038 .ve
1039  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1040  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
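
 A skeleton of such a method might look as follows (a sketch only; the body depends entirely on the discretizations carried by the two DMs):
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse)
   {
     if (mode == SCATTER_FORWARD) {
       /* move values of x_fine (defined via dm_fine) into x_coarse (defined via dm_coarse) */
     } else if (mode == SCATTER_REVERSE) {
       /* move values of x_coarse back into x_fine */
     }
     return 0;
   }
.ve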
1041 
1042  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1043  of state variables between the fine and coarse DMs.
1044  In the context of a finite element discretization, an example state variable might be
1045  values associated with quadrature points within each element.
1046  A user provided state scatter method is composed via
1047 .vb
1048    {
1049      DM dm_fine;
1050      PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1051    }
1052 .ve
1053  The signature of the user provided state scatter method is
1054 .vb
1055    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1056 .ve
1057  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1058  The user is only required to support mode = SCATTER_FORWARD.
1059  No assumption is made about the data type of the state variables.
1060  These must be managed by the user and must be accessible from the DM.
1061 
1062  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1063  associated with the sub-KSP residing within PCTelescope.
1064  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1065  KSPSetComputeOperators() should be "similar" in type or origin.
1066  Specifically the following rules are used to infer what context on the sub-KSP should be.
1067 
1068  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1069  Note that the special case of a DMSHELL context is queried.
1070 
1071 .vb
1072    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1073    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1074    DMShellGetContext(dm_fine,&dmfine_shellctx);
1075 
1076    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1077    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1078 .ve
1079 
1080  The following rules are then enforced:
1081 
1082  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1083  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1084 
1085  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1086  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1087  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1088 
1089  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1090  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1091  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1092 
1093  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1094  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1095  In this case, an additional mechanism is provided via a composed function which will return
1096  the actual context to be used. To use this feature you must compose the "getter" function
1097  with the coarse DM, e.g.
1098 .vb
1099    {
1100      DM dm_coarse;
1101      PetscObjectComposeFunction((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1102    }
1103 .ve
1104  The signature of the user provided method is
1105 .vb
1106    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1107 .ve
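
 Putting the pieces together, a hypothetical end-to-end setup might look as follows (names prefixed with your_ are placeholders, and it is assumed dm_fine and dm_coarse already exist, with dm_coarse defined on a sub-communicator of the communicator of dm_fine):
.vb
   DMSetCoarseDM(dm_fine,dm_coarse);
   PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   KSPSetDM(ksp,dm_fine);
   KSPSetComputeOperators(ksp,your_compute_operators,NULL);
   KSPGetPC(ksp,&pc);
   PCSetType(pc,PCTELESCOPE);
   PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
.ve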
1108 
1109  Level: advanced
1110 
1111 @*/
1112 PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
1113 {
1114   PetscFunctionBegin;
1115   PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));
1116   PetscFunctionReturn(0);
1117 }
1118 
1119 /*@
1120  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating whether any function set via KSPSetComputeOperators() will be ignored.
1121 
1122  Not Collective
1123 
1124  Input Parameter:
1125 .  pc - the preconditioner context
1126 
1127  Output Parameter:
1128 .  v - the flag
1129 
1130  Level: advanced
1131 
1132 @*/
1133 PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
1134 {
1135   PetscFunctionBegin;
1136   PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));
1137   PetscFunctionReturn(0);
1138 }
1139 
1140 /*@
1141  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore any function set via KSPSetComputeOperators().
1142 
1143  Logically Collective
1144 
1145  Input Parameters:
1146 +  pc - the preconditioner context
1147 -  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators() on pc
1150 
1151  Level: advanced
1152 
1153 @*/
1154 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
1155 {
1156   PetscFunctionBegin;
1157   PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));
1158   PetscFunctionReturn(0);
1159 }
1160 
1161 /*@
1162  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1163 
1164  Not Collective
1165 
1166  Input Parameter:
1167 .  pc - the preconditioner context
1168 
1169  Output Parameter:
1170 .  subdm - The re-partitioned DM
1171 
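 Notes:
 A minimal sketch. The returned DM is NULL on ranks outside the sub-communicator, and for setup types which do not re-partition a DM:
.vb
   DM subdm;
   PCTelescopeGetDM(pc,&subdm);
   if (subdm) { /* e.g. the re-partitioned DMDA on the sub-communicator */ }
.ve
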
1172  Level: advanced
1173 
1174 @*/
1175 PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
1176 {
1177   PetscFunctionBegin;
1178   PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));
1179   PetscFunctionReturn(0);
1180 }
1181 
1182 /*@
1183  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1184 
1185  Logically Collective
1186 
1187  Input Parameters:
1188 +  pc - the preconditioner context
1189 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1190 
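 Notes:
 For example, to select contiguous blocks of ranks for the sub-communicator (equivalent to -pc_telescope_subcomm_type contiguous):
.vb
   PCTelescopeSetSubcommType(pc,PETSC_SUBCOMM_CONTIGUOUS);
.ve
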
1191  Level: advanced
1192 
1193 .seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
1194 @*/
1195 PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
1196 {
1197   PetscFunctionBegin;
1198   PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));
1199   PetscFunctionReturn(0);
1200 }
1201 
1202 /*@
1203  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1204 
1205  Not Collective
1206 
1207  Input Parameter:
1208 .  pc - the preconditioner context
1209 
1210  Output Parameter:
1211 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1212 
1213  Level: advanced
1214 
1215 .seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
1216 @*/
1217 PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
1218 {
1219   PetscFunctionBegin;
1220   PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));
1221   PetscFunctionReturn(0);
1222 }
1223 
1224 /* -------------------------------------------------------------------------------------*/
1225 /*MC
1226    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1227 
1228    Options Database:
1229 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1230 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1231 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1232 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1233 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1234 
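   An illustrative invocation (a sketch; the application name app and the solver choices made on the sub-communicator are examples only). Note that options for the sub-KSP use the prefix "telescope_":
.vb
   mpiexec -n 16 ./app -ksp_type cg -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_pc_type bjacobi
.ve
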
1235    Level: advanced
1236 
1237    Notes:
1238    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1239    creates a child sub-communicator (c') containing fewer MPI ranks than the original communicator c.
1240    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1241    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1242    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1243    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1244 
1245    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1246 
1247    There are three setup mechanisms for PCTelescope. Features supported by each type are described below.
1248    In the following, we will refer to the operators B and B'; these are the Bmat operators provided to the KSP on the
1249    communicators c and c', respectively.
1250 
1251    [1] Default setup
1252    The sub-communicator c' is created via PetscSubcommCreate().
1253    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
1254    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1255    No support is provided for KSPSetComputeOperators().
1256    Currently there is no support for the flag -pc_use_amat.
1257 
1258    [2] DM aware setup
1259    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1260    c' is created via PetscSubcommCreate().
1261    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1262    Currently only support for re-partitioning a DMDA is provided.
1263    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
1264    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1265    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1266    This is fragile since the user must ensure that their user context is valid for use on c'.
1267    Currently there is no support for the flag -pc_use_amat.
1268 
1269    [3] Coarse DM setup
1270    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1271    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1272    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1273    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1274    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1275    available with using multi-grid on unstructured meshes.
1276    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1277    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
1278    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1279    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1280    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1281    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1282    Currently there is no support for the flag -pc_use_amat.
1283    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1284    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1285 
1286    Developer Notes:
1287    During PCSetup, the B operator is scattered onto c'.
1288    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1289    Then, KSPSolve() is executed on the c' communicator.
1290 
1291    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1292    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1293 
1294    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1295    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1296    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1297 
1298    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1299    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1300    and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
1301    for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1302    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1303 
1304    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1305 
1306    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1307    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1308    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1309 
1310    Limitations/improvements include the following.
1311    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1312    A unified mechanism to query for user contexts, as required by KSPSetComputeOperators() and MatNullSpaceSetFunction(), is also lacking.
1313 
1314    The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1315    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
1316    VecPermute() does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1317    consistent manner.
1318 
1319    Mapping of vectors (default setup mode) is performed in the following way.
1320    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1321    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1322    We perform the scatter to the sub-communicator in the following way.
1323    [1] Given a vector x defined on communicator c
1324 
1325 .vb
1326    rank(c)  local values of x
1327    ------- ----------------------------------------
1328         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1329         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1330         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1331         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1332 .ve
1333 
1334    scatter into xtmp defined also on comm c, so that we have the following values
1335 
1336 .vb
1337    rank(c)  local values of xtmp
1338    ------- ----------------------------------------------------------------------------
1339         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1340         1   [ ]
1341         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1342         3   [ ]
1343 .ve
1344 
1345    Ranks 1 and 3 (which do not have color = 0, and hence are not members of c') hold no values.
1346 
1347    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1348    Ranks 0 and 2 are the only ranks in c which have color = 0 and thus belong to c'.
1349 
1350 .vb
1351    rank(c')  local values of xred
1352    -------- ----------------------------------------------------------------------------
1353          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1354          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1355 .ve
1356 
1357   Contributed by Dave May
1358 
1359   Reference:
1360   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1361 
1362 .seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1363 M*/
1364 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1365 {
1366   struct _PC_Telescope *sred;
1367 
1368   PetscFunctionBegin;
1369   PetscCall(PetscNewLog(pc,&sred));
1370   sred->psubcomm       = NULL;
1371   sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
1372   sred->subcomm        = MPI_COMM_NULL;
1373   sred->redfactor      = 1;
1374   sred->ignore_dm      = PETSC_FALSE;
1375   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1376   sred->use_coarse_dm  = PETSC_FALSE;
1377   pc->data             = (void*)sred;
1378 
1379   pc->ops->apply           = PCApply_Telescope;
1380   pc->ops->applytranspose  = NULL;
1381   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1382   pc->ops->setup           = PCSetUp_Telescope;
1383   pc->ops->destroy         = PCDestroy_Telescope;
1384   pc->ops->reset           = PCReset_Telescope;
1385   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1386   pc->ops->view            = PCView_Telescope;
1387 
1388   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1389   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1390   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1391   sred->pctelescope_reset_type              = NULL;
1392 
1393   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope));
1394   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope));
1395   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope));
1396   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope));
1397   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope));
1398   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope));
1399   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope));
1400   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
1401   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
1402   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope));
1403   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope));
1404   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope));
1405   PetscFunctionReturn(0);
1406 }
1407