xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 7463b60c35e0d772c7cdc1e7c0df856b585e27c9)
1 
2 #include <petsc/private/petscimpl.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6 #include <petscdm.h> /*I "petscdm.h" I*/
7 #include "../src/ksp/pc/impls/telescope/telescope.h"
8 
/* One-shot registration flag and BibTeX entry for PetscCitationsRegister() in PCApply_Telescope() */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
29 
30 /*
31  default setup mode
32 
33  [1a] scatter to (FORWARD)
34  x(comm) -> xtmp(comm)
35  [1b] local copy (to) ranks with color = 0
36  xred(subcomm) <- xtmp
37 
38  [2] solve on sub KSP to obtain yred(subcomm)
39 
40  [3a] local copy (from) ranks with color = 0
41  yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
43  xtmp(comm) -> y(comm)
44 */
45 
46 /*
47   Collective[comm_f]
48   Notes
49    * Using comm_f = MPI_COMM_NULL will result in an error
50    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
51    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
52 */
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1; /* local verdict; combined across comm_f with MPI_MIN so any rank can veto */
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRMPI(ierr);
  /* group_c is only defined on ranks where comm_c is non-NULL */
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRMPI(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRMPI(ierr);
  }

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRMPI(ierr);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  /* prefill translation targets so untouched slots are recognizable below */
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    /* count the slots that remained untranslated */
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* no rank of comm_c mapped into comm_f => not a sub-communicator from this rank's view */
  if (count == size_f) valid = 0;

  /* collective verdict: valid only if every rank judged the subcomm valid */
  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRMPI(ierr);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}
115 
116 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
117 {
118   DM subdm = NULL;
119 
120   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
121   else {
122     switch (sred->sr_type) {
123     case TELESCOPE_DEFAULT: subdm = NULL;
124       break;
125     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
126       break;
127     case TELESCOPE_DMPLEX:  subdm = NULL;
128       break;
129     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
130       break;
131     }
132   }
133   return(subdm);
134 }
135 
/* Default setup: build the redundant vectors xred/yred on the sub-communicator and the
   scatter x(comm) -> xtmp(comm) whose local portion feeds xred (see file-top diagram).
   Collective on the parent communicator of sred->psubcomm. */
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* template vector x on the parent comm, matching the preconditioning matrix B */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: same global size M, but distributed over the sub-communicator only.
     m stays 0 on inactive ranks so xtmp (below) holds no entries there. */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: parent-comm vector whose local layout mirrors xred's (empty on inactive ranks),
     used as the staging buffer between x and xred */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* isin: indices of x this rank receives — xred's ownership range on active ranks,
     an empty stride on inactive ranks (ISCreateStride is collective on comm) */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* stash everything on the context; x was only a layout template */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
/* Default operator construction: extract each rank's rows (sred->isin) of B as a
   sequential matrix, then concatenate those pieces into the redundant operator *A
   on the sub-communicator. Collective on the communicator of pc. */
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc,bs;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* rows: this rank's slice chosen at setup; columns: all of them (identity IS) */
  isrow = sred->isin;
  ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);CHKERRQ(ierr);
  ierr = ISSetIdentity(iscol);CHKERRQ(ierr);
  ierr = MatGetBlockSizes(B,NULL,&bs);CHKERRQ(ierr);
  ierr = ISSetBlockSize(iscol,bs);CHKERRQ(ierr);
  ierr = MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
  /* MatCreateSubMatrices is collective on B's comm — every rank participates,
     even those whose isrow is empty */
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    /* stack the local pieces row-wise into the sub-communicator matrix */
    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  /* inactive ranks report a NULL redundant operator */
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
234 
/* Map a (near) nullspace of the parent operator onto the sub-communicator:
   each basis vector is scattered into xtmp and copied into a duplicate of sred->xred,
   then a new MatNullSpace is assembled on the subcomm (active ranks only).
   Collective on the communicator of pc — the scatter loop runs on every rank. */
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* sub_vecs stays NULL on inactive ranks; the copy loop below checks for that */
  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp (collective — all ranks must call the scatter) */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; local layouts of xtmp and sub_vecs[k] match by construction */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* user-supplied remove callbacks cannot be transplanted onto the subcomm object */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
287 
288 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
289 {
290   PetscErrorCode ierr;
291   Mat            B;
292 
293   PetscFunctionBegin;
294   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
295   /* Propagate the nullspace if it exists */
296   {
297     MatNullSpace nullspace,sub_nullspace;
298     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
299     if (nullspace) {
300       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
301       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
302       if (PCTelescope_isActiveRank(sred)) {
303         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
304         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
305       }
306     }
307   }
308   /* Propagate the near nullspace if it exists */
309   {
310     MatNullSpace nearnullspace,sub_nearnullspace;
311     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
312     if (nearnullspace) {
313       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
314       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
315       if (PCTelescope_isActiveRank(sred)) {
316         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
317         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
318       }
319     }
320   }
321   PetscFunctionReturn(0);
322 }
323 
324 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
325 {
326   PC_Telescope   sred = (PC_Telescope)pc->data;
327   PetscErrorCode ierr;
328   PetscBool      iascii,isstring;
329   PetscViewer    subviewer;
330 
331   PetscFunctionBegin;
332   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
333   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
334   if (iascii) {
335     {
336       MPI_Comm    comm,subcomm;
337       PetscMPIInt comm_size,subcomm_size;
338       DM          dm = NULL,subdm = NULL;
339 
340       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
341       subdm = private_PCTelescopeGetSubDM(sred);
342 
343       if (sred->psubcomm) {
344         comm = PetscSubcommParent(sred->psubcomm);
345         subcomm = PetscSubcommChild(sred->psubcomm);
346         ierr = MPI_Comm_size(comm,&comm_size);CHKERRMPI(ierr);
347         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRMPI(ierr);
348 
349         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
350         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
351         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
352         switch (sred->subcommtype) {
353         case PETSC_SUBCOMM_INTERLACED :
354           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
355           break;
356         case PETSC_SUBCOMM_CONTIGUOUS :
357           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
358           break;
359         default :
360           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
361         }
362         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
363       } else {
364         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
365         subcomm = sred->subcomm;
366         if (!PCTelescope_isActiveRank(sred)) {
367           subcomm = PETSC_COMM_SELF;
368         }
369 
370         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
371         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
372         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
373       }
374 
375       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
376       if (PCTelescope_isActiveRank(sred)) {
377         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
378 
379         if (dm && sred->ignore_dm) {
380           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
381         }
382         if (sred->ignore_kspcomputeoperators) {
383           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
384         }
385         switch (sred->sr_type) {
386         case TELESCOPE_DEFAULT:
387           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
388           break;
389         case TELESCOPE_DMDA:
390           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
391           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
392           break;
393         case TELESCOPE_DMPLEX:
394           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
395           break;
396         case TELESCOPE_COARSEDM:
397           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
398           break;
399         }
400 
401         if (dm) {
402           PetscObject obj = (PetscObject)dm;
403           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
404           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
405           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
406           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
407           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
408           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
409           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
410         } else {
411           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
412         }
413         if (subdm) {
414           PetscObject obj = (PetscObject)subdm;
415           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
416           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
417           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
418           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
419           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
420           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
421           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
422         } else {
423           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
424         }
425 
426         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
427         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
428       }
429       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
430     }
431   }
432   PetscFunctionReturn(0);
433 }
434 
/* Main setup for PCTelescope. On first call: detect the setup mode from the attached
   DM and user options, install the mode-specific callbacks, build the sub-communicator
   and the inner KSP. On every call: (re)build the redundant operator and hand it to
   the inner KSP. Collective on the communicator of pc. */
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* user options override the DM-derived choice: coarse DM first, ignore last */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* build a PetscSubcomm from the reduction factor / subcomm-type options */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* at least one rank must carry a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRMPI(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRMPI(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRMPI(ierr);
      }

      /* the coarse DM must live on a strictly smaller communicator */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRMPI(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      /* and its communicator must be a valid sub-communicator of the fine one */
      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      /* inner solver is controlled via -<prefix>telescope_ options */
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update: first call creates the redundant operator (and nullspaces), later calls reuse it */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled){
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
597 
/* Apply the telescoped preconditioner: scatter x into the staging vector xtmp,
   copy into xred, solve on the sub-communicator, copy the solution back into
   xtmp and scatter it out to y. Collective on the communicator of pc. */
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp (collective — inactive ranks contribute empty slices) */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred; xred is NULL on inactive ranks */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    /* local layouts of xtmp and xred coincide by construction (see setup) */
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: yred -> xtmp -> y via the reverse scatter */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
653 
654 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
655 {
656   PC_Telescope      sred = (PC_Telescope)pc->data;
657   PetscErrorCode    ierr;
658   Vec               xtmp,yred;
659   PetscInt          i,st,ed;
660   VecScatter        scatter;
661   const PetscScalar *x_array;
662   PetscBool         default_init_guess_value;
663 
664   PetscFunctionBegin;
665   xtmp    = sred->xtmp;
666   scatter = sred->scatter;
667   yred    = sred->yred;
668 
669   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
670   *reason = (PCRichardsonConvergedReason)0;
671 
672   if (!zeroguess) {
673     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
674     /* pull in vector y->xtmp */
675     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
676     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
677 
678     /* copy vector entries into xred */
679     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
680     if (yred) {
681       PetscScalar *LA_yred;
682       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
683       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
684       for (i=0; i<ed-st; i++) {
685         LA_yred[i] = x_array[i];
686       }
687       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
688     }
689     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
690   }
691 
692   if (PCTelescope_isActiveRank(sred)) {
693     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
694     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
695   }
696 
697   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
698 
699   if (PCTelescope_isActiveRank(sred)) {
700     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
701   }
702 
703   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
704   *outits = 1;
705   PetscFunctionReturn(0);
706 }
707 
708 static PetscErrorCode PCReset_Telescope(PC pc)
709 {
710   PC_Telescope   sred = (PC_Telescope)pc->data;
711   PetscErrorCode ierr;
712 
713   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
714   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
715   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
716   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
717   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
718   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
719   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
720   if (sred->pctelescope_reset_type) {
721     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
722   }
723   PetscFunctionReturn(0);
724 }
725 
726 static PetscErrorCode PCDestroy_Telescope(PC pc)
727 {
728   PC_Telescope   sred = (PC_Telescope)pc->data;
729   PetscErrorCode ierr;
730 
731   PetscFunctionBegin;
732   ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
733   ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
734   ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
735   ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
736   ierr = PetscFree(pc->data);CHKERRQ(ierr);
737   PetscFunctionReturn(0);
738 }
739 
/* Process the -pc_telescope_* command line options for this PC instance. */
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  /* Route through the setter so its pc->setupcalled guard applies */
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  /* Written directly into sred->redfactor, hence the explicit range check below */
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL);CHKERRQ(ierr);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
765 
/* PC implementation specific APIs */
767 
768 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
769 {
770   PC_Telescope red = (PC_Telescope)pc->data;
771   PetscFunctionBegin;
772   if (ksp) *ksp = red->ksp;
773   PetscFunctionReturn(0);
774 }
775 
776 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
777 {
778   PC_Telescope red = (PC_Telescope)pc->data;
779   PetscFunctionBegin;
780   if (subcommtype) *subcommtype = red->subcommtype;
781   PetscFunctionReturn(0);
782 }
783 
784 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
785 {
786   PC_Telescope     red = (PC_Telescope)pc->data;
787 
788   PetscFunctionBegin;
789   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
790   red->subcommtype = subcommtype;
791   PetscFunctionReturn(0);
792 }
793 
794 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
795 {
796   PC_Telescope red = (PC_Telescope)pc->data;
797   PetscFunctionBegin;
798   if (fact) *fact = red->redfactor;
799   PetscFunctionReturn(0);
800 }
801 
802 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
803 {
804   PC_Telescope     red = (PC_Telescope)pc->data;
805   PetscMPIInt      size;
806   PetscErrorCode   ierr;
807 
808   PetscFunctionBegin;
809   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
810   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
811   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
812   red->redfactor = fact;
813   PetscFunctionReturn(0);
814 }
815 
816 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
817 {
818   PC_Telescope red = (PC_Telescope)pc->data;
819   PetscFunctionBegin;
820   if (v) *v = red->ignore_dm;
821   PetscFunctionReturn(0);
822 }
823 
824 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
825 {
826   PC_Telescope red = (PC_Telescope)pc->data;
827   PetscFunctionBegin;
828   red->ignore_dm = v;
829   PetscFunctionReturn(0);
830 }
831 
832 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
833 {
834   PC_Telescope red = (PC_Telescope)pc->data;
835   PetscFunctionBegin;
836   if (v) *v = red->use_coarse_dm;
837   PetscFunctionReturn(0);
838 }
839 
840 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
841 {
842   PC_Telescope red = (PC_Telescope)pc->data;
843   PetscFunctionBegin;
844   red->use_coarse_dm = v;
845   PetscFunctionReturn(0);
846 }
847 
848 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
849 {
850   PC_Telescope red = (PC_Telescope)pc->data;
851   PetscFunctionBegin;
852   if (v) *v = red->ignore_kspcomputeoperators;
853   PetscFunctionReturn(0);
854 }
855 
856 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
857 {
858   PC_Telescope red = (PC_Telescope)pc->data;
859   PetscFunctionBegin;
860   red->ignore_kspcomputeoperators = v;
861   PetscFunctionReturn(0);
862 }
863 
864 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
865 {
866   PC_Telescope red = (PC_Telescope)pc->data;
867   PetscFunctionBegin;
868   *dm = private_PCTelescopeGetSubDM(red);
869   PetscFunctionReturn(0);
870 }
871 
872 /*@
873  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
874 
875  Not Collective
876 
877  Input Parameter:
878 .  pc - the preconditioner context
879 
880  Output Parameter:
.  subksp - the KSP defined on the smaller set of processes
882 
883  Level: advanced
884 
885 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
893 
894 /*@
895  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced by.
896 
897  Not Collective
898 
899  Input Parameter:
900 .  pc - the preconditioner context
901 
902  Output Parameter:
903 .  fact - the reduction factor
904 
905  Level: advanced
906 
907 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
915 
916 /*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced by.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
926 
927  Level: advanced
928 
929 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
937 
938 /*@
939  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
940 
941  Not Collective
942 
943  Input Parameter:
944 .  pc - the preconditioner context
945 
946  Output Parameter:
947 .  v - the flag
948 
949  Level: advanced
950 
951 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
959 
960 /*@
961  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
962 
963  Not Collective
964 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
970 
971  Level: advanced
972 
973 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
981 
982 /*@
983  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.
984 
985  Not Collective
986 
987  Input Parameter:
988 .  pc - the preconditioner context
989 
990  Output Parameter:
991 .  v - the flag
992 
993  Level: advanced
994 
995 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1003 
1004 /*@
1005  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
1006 
1007  Not Collective
1008 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
1014 
1015  Notes:
1016  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1017  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1018  -pc_telescope_subcomm_type will no longer have any meaning.
1019  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
1021  is the same as that of the parent DM.
1022  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1023  This will be checked at the time the preconditioner is setup and an error will occur if
1024  the coarse DM does not define a sub-communicator of that used by the parent DM.
1025 
1026  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1027  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1028 
1029  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1030 
1031  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1032  In the user code, this is achieved via
1033 .vb
1034    {
1035      DM dm_fine;
1036      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1037    }
1038 .ve
1039  The signature of the user provided field scatter method is
1040 .vb
1041    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1042 .ve
1043  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1044  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1045 
1046  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1047  of state variables between the fine and coarse DMs.
1048  In the context of a finite element discretization, an example state variable might be
1049  values associated with quadrature points within each element.
1050  A user provided state scatter method is composed via
1051 .vb
1052    {
1053      DM dm_fine;
1054      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1055    }
1056 .ve
1057  The signature of the user provided state scatter method is
1058 .vb
1059    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1060 .ve
1061  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1062  The user is only required to support mode = SCATTER_FORWARD.
1063  No assumption is made about the data type of the state variables.
1064  These must be managed by the user and must be accessible from the DM.
1065 
1066  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1067  associated with the sub-KSP residing within PCTelescope.
1068  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1069  KSPSetComputeOperators() should be "similar" in type or origin.
1070  Specifically the following rules are used to infer what context on the sub-KSP should be.
1071 
1072  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1073  Note that the special case of a DMSHELL context is queried.
1074 
1075 .vb
1076    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1077    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1078    DMShellGetContext(dm_fine,&dmfine_shellctx);
1079 
1080    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1081    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1082 .ve
1083 
1084  The following rules are then enforced:
1085 
1086  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1087  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1088 
1089  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1090  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1091  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1092 
1093  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1094  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1095  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1096 
1097  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1098  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1099  In this case, an additional mechanism is provided via a composed function which will return
1100  the actual context to be used. To use this feature you must compose the "getter" function
1101  with the coarse DM, e.g.
1102 .vb
1103    {
1104      DM dm_coarse;
1105      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1106    }
1107 .ve
1108  The signature of the user provided method is
1109 .vb
1110    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1111 .ve
1112 
1113  Level: advanced
1114 
1115 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1123 
1124 /*@
1125  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
1126 
1127  Not Collective
1128 
1129  Input Parameter:
1130 .  pc - the preconditioner context
1131 
1132  Output Parameter:
1133 .  v - the flag
1134 
1135  Level: advanced
1136 
1137 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1145 
1146 /*@
1147  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
1148 
1149  Not Collective
1150 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
1156 
1157  Level: advanced
1158 
1159 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1167 
1168 /*@
1169  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1170 
1171  Not Collective
1172 
1173  Input Parameter:
1174 .  pc - the preconditioner context
1175 
1176  Output Parameter:
1177 .  subdm - The re-partitioned DM
1178 
1179  Level: advanced
1180 
1181 @*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1189 
1190 /*@
1191  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1192 
1193  Logically Collective
1194 
1195  Input Parameter:
1196 +  pc - the preconditioner context
1197 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1198 
1199  Level: advanced
1200 
1201 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
1202 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op if pc is not of type PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1210 
1211 /*@
1212  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1213 
1214  Not Collective
1215 
1216  Input Parameter:
1217 .  pc - the preconditioner context
1218 
1219  Output Parameter:
1220 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1221 
1222  Level: advanced
1223 
1224 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
1225 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the composed PCTELESCOPE implementation; PetscUseMethod errors if absent */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1233 
1234 /* -------------------------------------------------------------------------------------*/
1235 /*MC
1236    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1237 
1238    Options Database:
1239 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1240 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1241 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1242 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1243 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1244 
1245    Level: advanced
1246 
1247    Notes:
1248    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1249    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1250    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1251    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1252    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1253    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1254 
1255    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1256 
1257    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1258    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1259    communicators c and c' respectively.
1260 
1261    [1] Default setup
1262    The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1265    No support is provided for KSPSetComputeOperators().
1266    Currently there is no support for the flag -pc_use_amat.
1267 
1268    [2] DM aware setup
1269    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1270    c' is created via PetscSubcommCreate().
1271    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1272    Currently only support for re-partitioning a DMDA is provided.
1273    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1275    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1276    This is fragile since the user must ensure that their user context is valid for use on c'.
1277    Currently there is no support for the flag -pc_use_amat.
1278 
1279    [3] Coarse DM setup
1280    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1281    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1282    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1283    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1284    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1285    available with using multi-grid on unstructured meshes.
1286    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1287    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1289    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1290    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1291    Propogation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1292    Currently there is no support for the flag -pc_use_amat.
1293    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1294    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1295 
1296    Developer Notes:
1297    During PCSetup, the B operator is scattered onto c'.
1298    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1299    Then, KSPSolve() is executed on the c' communicator.
1300 
1301    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1302    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1303 
1304    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1305    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1306    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1307 
1308    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1309    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1310    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1312    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1313 
1314    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1315 
1316    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1317    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1318    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1319 
1320    Limitations/improvements include the following.
1321    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1322    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1323 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
1327    consistent manner.
1328 
1329    Mapping of vectors (default setup mode) is performed in the following way.
1330    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1331    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1332    We perform the scatter to the sub-communicator in the following way.
1333    [1] Given a vector x defined on communicator c
1334 
1335 .vb
1336    rank(c)  local values of x
1337    ------- ----------------------------------------
1338         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1339         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1340         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1341         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1342 .ve
1343 
1344    scatter into xtmp defined also on comm c, so that we have the following values
1345 
1346 .vb
1347    rank(c)  local values of xtmp
1348    ------- ----------------------------------------------------------------------------
1349         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1350         1   [ ]
1351         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1352         3   [ ]
1353 .ve
1354 
1355    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1356 
1357 
1358    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1359    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1360 
1361 .vb
1362    rank(c')  local values of xred
1363    -------- ----------------------------------------------------------------------------
1364          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1365          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1366 .ve
1367 
1368   Contributed by Dave May
1369 
1370   Reference:
1371   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1372 
1373 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1374 M*/
/* Constructor for PCTELESCOPE: allocates the context, sets defaults,
   installs the PC ops table and composes the type-specific Get/Set methods. */
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Default state: interlaced subcomm selection, reduction factor 1, all flags off */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Standard PC virtual function table; applytranspose is not supported */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Hooks used by PCSetUp_Telescope; overridden when a DM/coarse-DM setup mode is selected */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Compose the type-specific methods backing the public PCTelescopeGet/Set API */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1419