xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 5bd1e5768a3b141382a3ebb2e780cd2d3b3bfbf0)
1 
2 #include <petsc/private/petscimpl.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6 #include <petscdm.h> /*I "petscdm.h" I*/
7 #include "../src/ksp/pc/impls/telescope/telescope.h"
8 
9 static PetscBool  cited = PETSC_FALSE;
10 static const char citation[] =
11 "@inproceedings{MaySananRuppKnepleySmith2016,\n"
12 "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
13 "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
14 "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
15 "  series    = {PASC '16},\n"
16 "  isbn      = {978-1-4503-4126-4},\n"
17 "  location  = {Lausanne, Switzerland},\n"
18 "  pages     = {5:1--5:12},\n"
19 "  articleno = {5},\n"
20 "  numpages  = {12},\n"
21 "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
22 "  doi       = {10.1145/2929908.2929913},\n"
23 "  acmid     = {2929913},\n"
24 "  publisher = {ACM},\n"
25 "  address   = {New York, NY, USA},\n"
26 "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
27 "  year      = {2016}\n"
28 "}\n";
29 
30 /*
31  default setup mode
32 
33  [1a] scatter to (FORWARD)
34  x(comm) -> xtmp(comm)
35  [1b] local copy (to) ranks with color = 0
36  xred(subcomm) <- xtmp
37 
38  [2] solve on sub KSP to obtain yred(subcomm)
39 
40  [3a] local copy (from) ranks with color = 0
41  yred(subcomm) --> xtmp
42  [3b] scatter from (REVERSE)
43  xtmp(comm) -> y(comm)
44 */
45 
46 /*
47   Collective[comm_f]
48   Notes
49    * Using comm_f = MPI_COMM_NULL will result in an error
50    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
51    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
52 */
/*
 Determine (collectively over comm_f) whether comm_c is a usable sub-communicator of comm_f.
 The result is identical on every rank of comm_f: *isvalid is PETSC_TRUE only if
 (i) at least one rank supplies a non-NULL comm_c, and
 (ii) on every rank providing a non-NULL comm_c, at least one of its ranks maps into comm_f.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1; /* local verdict; reduced with MPI_MIN below so any rank can veto */
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRQ(ierr);
  /* group_c is only defined on ranks that actually hold a communicator */
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRQ(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRQ(ierr);
  }

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRQ(ierr);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  /* pre-fill with MPI_UNDEFINED so untouched slots count as "not translated" */
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* every slot undefined => no rank of comm_c could be mapped into comm_f */
  if (count == size_f) valid = 0;

  /* agree on a single global verdict: any local veto invalidates the subcomm everywhere */
  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRQ(ierr);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
115 
116 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
117 {
118   DM subdm = NULL;
119 
120   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
121   else {
122     switch (sred->sr_type) {
123     case TELESCOPE_DEFAULT: subdm = NULL;
124       break;
125     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
126       break;
127     case TELESCOPE_DMPLEX:  subdm = NULL;
128       break;
129     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
130       break;
131     }
132   }
133   return(subdm);
134 }
135 
/*
 Default setup: create the redundant vectors xred/yred on the sub-communicator,
 a staging vector xtmp on the parent communicator, and a VecScatter that places
 the entries owned by each active sub-comm rank at the start of that rank's
 local segment of xtmp, so xred can be filled by a direct local copy.
 (See the "default setup mode" diagram at the top of this file.)
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred exists only on ranks belonging to the sub-communicator; m stays 0 elsewhere */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* staging vector on the parent comm: local size m mirrors xred's layout,
     so active ranks receive exactly the entries they own in xred */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* index set selecting the entries each rank will pull from x; empty on inactive ranks */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* stash everything on the PC context; x was only needed as a layout template */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
/*
 Build (or update, per 'reuse') the redundant operator Bred on the sub-communicator.
 Each rank extracts the sequential sub-matrix of B corresponding to the rows it
 owns in the redundant layout (sred->isin) and all columns; active ranks then
 concatenate these pieces into a parallel matrix on the sub-communicator.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* rows: the redundant-layout rows owned by this rank; cols: all of them */
  isrow = sred->isin;
  ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr);
  /* MatCreateSubMatrices is collective over comm; inactive ranks get an empty piece */
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr); /* free the array wrapper only; Blocal itself is destroyed below */
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred; /* NULL on inactive ranks */
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
230 
/*
 Map a (near) nullspace of the parent operator onto the sub-communicator:
 each basis vector is scattered into the redundant layout via sred->scatter
 and copied into a duplicate of sred->xred, then a new MatNullSpace is built
 on the sub-communicator from the copies. Custom remove callbacks cannot be
 propagated and raise an error.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* only active ranks hold sub-communicator copies of the basis vectors */
  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries; the scatter is collective, so ALL ranks execute the loop body */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; the scatter placed the owned entries
         at the start of xtmp's local array, so a straight index copy works */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
283 
284 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
285 {
286   PetscErrorCode ierr;
287   Mat            B;
288 
289   PetscFunctionBegin;
290   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
291   /* Propagate the nullspace if it exists */
292   {
293     MatNullSpace nullspace,sub_nullspace;
294     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
295     if (nullspace) {
296       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
297       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
298       if (PCTelescope_isActiveRank(sred)) {
299         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
300         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
301       }
302     }
303   }
304   /* Propagate the near nullspace if it exists */
305   {
306     MatNullSpace nearnullspace,sub_nearnullspace;
307     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
308     if (nearnullspace) {
309       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
310       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
311       if (PCTelescope_isActiveRank(sred)) {
312         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
313         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
314       }
315     }
316   }
317   PetscFunctionReturn(0);
318 }
319 
320 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
321 {
322   PC_Telescope   sred = (PC_Telescope)pc->data;
323   PetscErrorCode ierr;
324   PetscBool      iascii,isstring;
325   PetscViewer    subviewer;
326 
327   PetscFunctionBegin;
328   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
329   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
330   if (iascii) {
331     {
332       MPI_Comm    comm,subcomm;
333       PetscMPIInt comm_size,subcomm_size;
334       DM          dm = NULL,subdm = NULL;
335 
336       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
337       subdm = private_PCTelescopeGetSubDM(sred);
338 
339       if (sred->psubcomm) {
340         comm = PetscSubcommParent(sred->psubcomm);
341         subcomm = PetscSubcommChild(sred->psubcomm);
342         ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
343         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);
344 
345         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
346         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
347         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
348         switch (sred->subcommtype) {
349         case PETSC_SUBCOMM_INTERLACED :
350           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
351           break;
352         case PETSC_SUBCOMM_CONTIGUOUS :
353           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
354           break;
355         default :
356           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
357         }
358         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
359       } else {
360         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
361         subcomm = sred->subcomm;
362         if (!PCTelescope_isActiveRank(sred)) {
363           subcomm = PETSC_COMM_SELF;
364         }
365 
366         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
367         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
368         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
369       }
370 
371       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
372       if (PCTelescope_isActiveRank(sred)) {
373         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
374 
375         if (dm && sred->ignore_dm) {
376           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
377         }
378         if (sred->ignore_kspcomputeoperators) {
379           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
380         }
381         switch (sred->sr_type) {
382         case TELESCOPE_DEFAULT:
383           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
384           break;
385         case TELESCOPE_DMDA:
386           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
387           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
388           break;
389         case TELESCOPE_DMPLEX:
390           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
391           break;
392         case TELESCOPE_COARSEDM:
393           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
394           break;
395         }
396 
397         if (dm) {
398           PetscObject obj = (PetscObject)dm;
399           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
400           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
401           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
402           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
403           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
404           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
405           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
406         } else {
407           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
408         }
409         if (subdm) {
410           PetscObject obj = (PetscObject)subdm;
411           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
412           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
413           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
414           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
415           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
416           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
417           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
418         } else {
419           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
420         }
421 
422         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
423         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
424       }
425       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
426     }
427   }
428   PetscFunctionReturn(0);
429 }
430 
/*
 PCSetUp for PCTELESCOPE.
 On first setup: classify the setup type from any attached DM (and the
 ignore_dm / use_coarse_dm flags), install the per-type function pointers,
 define the sub-communicator (via PetscSubcomm for DEFAULT/DMDA, or from the
 coarse DM's communicator otherwise), and create the inner KSP on active ranks.
 On every setup: create or update the redundant operator and hand it to the KSP.
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* user flags override the auto-detected type (coarse DM first, then ignore) */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
    break;
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default: SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
    break;
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* reduce the parent communicator by sred->redfactor */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* count how many ranks actually carry a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRQ(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRQ(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRQ(ierr);
      }

      /* the coarse communicator must be strictly smaller than the fine one */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRQ(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled){
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
593 
/*
 Apply the telescoped preconditioner (default mode):
   x(comm) -> xtmp(comm) -> xred(subcomm) -> [sub KSP solve] -> yred(subcomm)
   -> xtmp(comm) -> y(comm)
 The scatter places each active rank's owned entries at the start of its local
 segment of xtmp, so the xtmp<->xred copies are plain index-for-index copies.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred (xred is NULL on inactive ranks) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
649 
650 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
651 {
652   PC_Telescope      sred = (PC_Telescope)pc->data;
653   PetscErrorCode    ierr;
654   Vec               xtmp,yred;
655   PetscInt          i,st,ed;
656   VecScatter        scatter;
657   const PetscScalar *x_array;
658   PetscBool         default_init_guess_value;
659 
660   PetscFunctionBegin;
661   xtmp    = sred->xtmp;
662   scatter = sred->scatter;
663   yred    = sred->yred;
664 
665   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
666   *reason = (PCRichardsonConvergedReason)0;
667 
668   if (!zeroguess) {
669     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
670     /* pull in vector y->xtmp */
671     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
672     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
673 
674     /* copy vector entries into xred */
675     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
676     if (yred) {
677       PetscScalar *LA_yred;
678       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
679       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
680       for (i=0; i<ed-st; i++) {
681         LA_yred[i] = x_array[i];
682       }
683       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
684     }
685     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
686   }
687 
688   if (PCTelescope_isActiveRank(sred)) {
689     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
690     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
691   }
692 
693   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
694 
695   if (PCTelescope_isActiveRank(sred)) {
696     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
697   }
698 
699   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
700   *outits = 1;
701   PetscFunctionReturn(0);
702 }
703 
704 static PetscErrorCode PCReset_Telescope(PC pc)
705 {
706   PC_Telescope   sred = (PC_Telescope)pc->data;
707   PetscErrorCode ierr;
708 
709   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
710   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
711   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
712   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
713   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
714   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
715   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
716   if (sred->pctelescope_reset_type) {
717     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
718   }
719   PetscFunctionReturn(0);
720 }
721 
/*
 Destroy the PCTELESCOPE context: reset first (releases setup-time objects),
 then destroy the inner KSP, the PetscSubcomm, the DM context, and finally
 the PC_Telescope struct itself. Order matters: reset must precede the KSP
 destroy so the per-type reset hooks can still reach sred->ksp.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
735 
736 static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
737 {
738   PC_Telescope     sred = (PC_Telescope)pc->data;
739   PetscErrorCode   ierr;
740   MPI_Comm         comm;
741   PetscMPIInt      size;
742   PetscBool        flg;
743   PetscSubcommType subcommtype;
744 
745   PetscFunctionBegin;
746   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
747   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
748   ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
749   ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
750   if (flg) {
751     ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
752   }
753   ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr);
754   if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
755   ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr);
756   ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr);
757   ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,0);CHKERRQ(ierr);
758   ierr = PetscOptionsTail();CHKERRQ(ierr);
759   PetscFunctionReturn(0);
760 }
761 
/* PC implementation specific APIs */
763 
764 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
765 {
766   PC_Telescope red = (PC_Telescope)pc->data;
767   PetscFunctionBegin;
768   if (ksp) *ksp = red->ksp;
769   PetscFunctionReturn(0);
770 }
771 
772 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
773 {
774   PC_Telescope red = (PC_Telescope)pc->data;
775   PetscFunctionBegin;
776   if (subcommtype) *subcommtype = red->subcommtype;
777   PetscFunctionReturn(0);
778 }
779 
780 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
781 {
782   PC_Telescope     red = (PC_Telescope)pc->data;
783 
784   PetscFunctionBegin;
785   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
786   red->subcommtype = subcommtype;
787   PetscFunctionReturn(0);
788 }
789 
790 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
791 {
792   PC_Telescope red = (PC_Telescope)pc->data;
793   PetscFunctionBegin;
794   if (fact) *fact = red->redfactor;
795   PetscFunctionReturn(0);
796 }
797 
798 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
799 {
800   PC_Telescope     red = (PC_Telescope)pc->data;
801   PetscMPIInt      size;
802   PetscErrorCode   ierr;
803 
804   PetscFunctionBegin;
805   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
806   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
807   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
808   red->redfactor = fact;
809   PetscFunctionReturn(0);
810 }
811 
812 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
813 {
814   PC_Telescope red = (PC_Telescope)pc->data;
815   PetscFunctionBegin;
816   if (v) *v = red->ignore_dm;
817   PetscFunctionReturn(0);
818 }
819 
820 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
821 {
822   PC_Telescope red = (PC_Telescope)pc->data;
823   PetscFunctionBegin;
824   red->ignore_dm = v;
825   PetscFunctionReturn(0);
826 }
827 
828 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
829 {
830   PC_Telescope red = (PC_Telescope)pc->data;
831   PetscFunctionBegin;
832   if (v) *v = red->use_coarse_dm;
833   PetscFunctionReturn(0);
834 }
835 
836 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
837 {
838   PC_Telescope red = (PC_Telescope)pc->data;
839   PetscFunctionBegin;
840   red->use_coarse_dm = v;
841   PetscFunctionReturn(0);
842 }
843 
844 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
845 {
846   PC_Telescope red = (PC_Telescope)pc->data;
847   PetscFunctionBegin;
848   if (v) *v = red->ignore_kspcomputeoperators;
849   PetscFunctionReturn(0);
850 }
851 
852 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
853 {
854   PC_Telescope red = (PC_Telescope)pc->data;
855   PetscFunctionBegin;
856   red->ignore_kspcomputeoperators = v;
857   PetscFunctionReturn(0);
858 }
859 
860 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
861 {
862   PC_Telescope red = (PC_Telescope)pc->data;
863   PetscFunctionBegin;
864   *dm = private_PCTelescopeGetSubDM(red);
865   PetscFunctionReturn(0);
866 }
867 
/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetDM(), PCTelescopeGetReductionFactor()
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
889 
/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced by.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetReductionFactor()
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
911 
/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced by.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetReductionFactor()
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently does nothing if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
933 
/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetIgnoreDM()
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
955 
/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetIgnoreDM()
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently does nothing if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
977 
/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetUseCoarseDM()
@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
999 
1000 /*@
1001  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
1002 
1003  Not Collective
1004 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
1010 
1011  Notes:
1012  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1013  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1014  -pc_telescope_subcomm_type will no longer have any meaning.
1015  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
1017  is the same as that of the parent DM.
1018  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1019  This will be checked at the time the preconditioner is setup and an error will occur if
1020  the coarse DM does not define a sub-communicator of that used by the parent DM.
1021 
1022  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1023  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1024 
1025  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1026 
1027  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1028  In the user code, this is achieved via
1029 .vb
1030    {
1031      DM dm_fine;
1032      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1033    }
1034 .ve
1035  The signature of the user provided field scatter method is
1036 .vb
1037    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1038 .ve
1039  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1040  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1041 
1042  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1043  of state variables between the fine and coarse DMs.
1044  In the context of a finite element discretization, an example state variable might be
1045  values associated with quadrature points within each element.
1046  A user provided state scatter method is composed via
1047 .vb
1048    {
1049      DM dm_fine;
1050      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1051    }
1052 .ve
1053  The signature of the user provided state scatter method is
1054 .vb
1055    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1056 .ve
1057  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1058  The user is only required to support mode = SCATTER_FORWARD.
1059  No assumption is made about the data type of the state variables.
1060  These must be managed by the user and must be accessible from the DM.
1061 
1062  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1063  associated with the sub-KSP residing within PCTelescope.
1064  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1065  KSPSetComputeOperators() should be "similar" in type or origin.
1066  Specifically the following rules are used to infer what context on the sub-KSP should be.
1067 
1068  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1069  Note that the special case of a DMSHELL context is queried.
1070 
1071 .vb
1072    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1073    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1074    DMShellGetContext(dm_fine,&dmfine_shellctx);
1075 
1076    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1077    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1078 .ve
1079 
1080  The following rules are then enforced:
1081 
1082  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1083  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1084 
1085  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1086  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1087  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1088 
1089  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1090  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1091  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1092 
1093  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1094  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1095  In this case, an additional mechanism is provided via a composed function which will return
1096  the actual context to be used. To use this feature you must compose the "getter" function
1097  with the coarse DM, e.g.
1098 .vb
1099    {
1100      DM dm_coarse;
1101      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1102    }
1103 .ve
1104  The signature of the user provided method is
1105 .vb
1106    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1107 .ve
1108 
1109  Level: advanced
1110 
1111 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently does nothing if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1119 
/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetIgnoreKSPComputeOperators()
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1141 
/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetIgnoreKSPComputeOperators()
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently does nothing if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1163 
/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetKSP()
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1185 
/*@
 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently does nothing if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1206 
/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation registered in PCCreate_Telescope() */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1229 
1230 /* -------------------------------------------------------------------------------------*/
1231 /*MC
1232    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1233 
1234    Options Database:
1235 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1236 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1237 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1238 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1239 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1240 
1241    Level: advanced
1242 
1243    Notes:
1244    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1245    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1246    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1247    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1248    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1249    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1250 
1251    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1252 
1253    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1254    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1255    communicators c and c' respectively.
1256 
1257    [1] Default setup
1258    The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1261    No support is provided for KSPSetComputeOperators().
1262    Currently there is no support for the flag -pc_use_amat.
1263 
1264    [2] DM aware setup
1265    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1266    c' is created via PetscSubcommCreate().
1267    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1268    Currently only support for re-partitioning a DMDA is provided.
1269    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1271    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1272    This is fragile since the user must ensure that their user context is valid for use on c'.
1273    Currently there is no support for the flag -pc_use_amat.
1274 
1275    [3] Coarse DM setup
1276    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1277    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1278    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1279    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1280    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1281    available with using multi-grid on unstructured meshes.
1282    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1283    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1285    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1286    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1288    Currently there is no support for the flag -pc_use_amat.
1289    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1290    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1291 
1292    Developer Notes:
1293    During PCSetup, the B operator is scattered onto c'.
1294    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1295    Then, KSPSolve() is executed on the c' communicator.
1296 
1297    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1298    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1299 
1300    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1301    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1302    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1303 
1304    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1305    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1306    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1308    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1309 
1310    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1311 
1312    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1313    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1314    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1315 
1316    Limitations/improvements include the following.
1317    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1318    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1319 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
1324 
1325    Mapping of vectors (default setup mode) is performed in the following way.
1326    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1327    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1328    We perform the scatter to the sub-communicator in the following way.
1329    [1] Given a vector x defined on communicator c
1330 
1331 .vb
1332    rank(c)  local values of x
1333    ------- ----------------------------------------
1334         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1335         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1336         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1337         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1338 .ve
1339 
1340    scatter into xtmp defined also on comm c, so that we have the following values
1341 
1342 .vb
1343    rank(c)  local values of xtmp
1344    ------- ----------------------------------------------------------------------------
1345         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1346         1   [ ]
1347         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1348         3   [ ]
1349 .ve
1350 
1351    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1352 
1353 
1354    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1355    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1356 
1357 .vb
1358    rank(c')  local values of xred
1359    -------- ----------------------------------------------------------------------------
1360          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1361          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1362 .ve
1363 
1364   Contributed by Dave May
1365 
1366   Reference:
1367   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1368 
1369 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1370 M*/
/* Constructor for PCTELESCOPE: allocate the context, install defaults and register the type-specific API. */
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* PetscNewLog() returns zeroed memory, so members not assigned below (ksp, isin, scatter, ...) start as NULL. */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  /* Defaults: interlaced subcomm, no reduction (factor 1), honour any attached DM and compute-operators callback. */
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Default setup callbacks; presumably swapped for DM-aware variants during PCSetUp_Telescope — confirm there. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Register the type-specific methods so the public PCTelescopeXXX() wrappers can dispatch to them. */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1415