xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision e5a36eccef3d6b83a2c625c30d0dfd5adb4001f2)
1 
2 #include <petsc/private/petscimpl.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6 #include <petscdm.h> /*I "petscdm.h" I*/
7 #include "../src/ksp/pc/impls/telescope/telescope.h"
8 
9 static PetscBool  cited = PETSC_FALSE;
10 static const char citation[] =
11 "@inproceedings{MaySananRuppKnepleySmith2016,\n"
12 "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
13 "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
14 "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
15 "  series    = {PASC '16},\n"
16 "  isbn      = {978-1-4503-4126-4},\n"
17 "  location  = {Lausanne, Switzerland},\n"
18 "  pages     = {5:1--5:12},\n"
19 "  articleno = {5},\n"
20 "  numpages  = {12},\n"
21 "  url       = {http://doi.acm.org/10.1145/2929908.2929913},\n"
22 "  doi       = {10.1145/2929908.2929913},\n"
23 "  acmid     = {2929913},\n"
24 "  publisher = {ACM},\n"
25 "  address   = {New York, NY, USA},\n"
26 "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
27 "  year      = {2016}\n"
28 "}\n";
29 
30 /*
31  default setup mode
32 
33  [1a] scatter to (FORWARD)
34  x(comm) -> xtmp(comm)
35  [1b] local copy (to) ranks with color = 0
36  xred(subcomm) <- xtmp
37 
38  [2] solve on sub KSP to obtain yred(subcomm)
39 
40  [3a] local copy (from) ranks with color = 0
41  yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
43  xtmp(comm) -> y(comm)
44 */
45 
46 /*
  Collective on comm_f
48   Notes
49    * Using comm_f = MPI_COMM_NULL will result in an error
50    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
51    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
52 */
/*
  Determine (collectively over comm_f) whether comm_c is a usable sub-communicator
  of comm_f. Every rank of comm_f receives the same answer in *isvalid.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1; /* local vote: 1 = valid, 0 = invalid; combined with MPI_MIN below */
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  int            *ranks_f = NULL,*ranks_c = NULL;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRQ(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRQ(ierr);
  }

  /* check not all comm_c's are NULL: size_c stays 0 on ranks with a NULL comm_c */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRQ(ierr);
  if (size_c_sum == 0) {
    valid = 0;
  }

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  for (k=0; k<size_f; k++) {
    ranks_f[k] = MPI_UNDEFINED; /* sentinel: translation leaves unmapped entries as MPI_UNDEFINED */
  }
  for (k=0; k<size_c; k++) {
    ranks_c[k] = (int)k;
  }

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* if every entry is untranslated, no rank of comm_c maps into comm_f */
  if (count == size_f) {
    valid = 0;
  }

  /* all ranks must agree: a single local failure invalidates the subcomm everywhere */
  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRQ(ierr);
  if (valid == 1) { *isvalid = PETSC_TRUE; }
  else { *isvalid = PETSC_FALSE; }

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
123 
124 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
125 {
126   DM subdm = NULL;
127 
128   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
129   else {
130     switch (sred->sr_type) {
131     case TELESCOPE_DEFAULT: subdm = NULL;
132       break;
133     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
134       break;
135     case TELESCOPE_DMPLEX:  subdm = NULL;
136       break;
137     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
138       break;
139     }
140   }
141   return(subdm);
142 }
143 
/*
  Default setup: create the redundant vectors (xred/yred) on the sub-communicator,
  a temporary vector (xtmp) on the parent communicator with a matching parallel
  layout, and the scatter that moves entries between them (see the setup-mode
  diagram at the top of this file).
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: the redundant work vector on the sub-communicator; only ranks that
     are active in the subcomm create it (m stays 0 on inactive ranks) */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: lives on the parent comm but its local size mirrors xred, so a
     local memcpy between xtmp and xred is valid on active ranks */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* isin: indices of x gathered onto this rank; empty stride on inactive ranks */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* stash everything in the PC context; ownership transfers to sred
     (released in PCReset_Telescope) */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
203 
/*
  Create (reuse != MAT_INITIAL_MATRIX: update) the redundant operator *A on the
  sub-communicator. Each rank extracts its locally gathered rows of B (all
  columns), then active ranks concatenate those sequential pieces into a
  parallel matrix on the subcomm. On inactive ranks *A is set to NULL.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* rows: the indices gathered onto this rank (set up in PCTelescopeSetUp_default);
     columns: everything */
  isrow = sred->isin;
  ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr);
  /* collective on comm: extracts one sequential sub-matrix per rank */
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    /* reuse the previously created parallel matrix when updating */
    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
238 
/*
  Scatter the vectors of a (near) nullspace of the parent operator onto the
  sub-communicator and build the equivalent MatNullSpace there.
  Collective on the parent comm: all ranks participate in the scatters, but
  *sub_nullspace is only created (and only valid) on active subcomm ranks.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* sub_vecs only exist on active ranks; inactive ranks keep sub_vecs = NULL */
  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp (collective: every rank must issue the scatter) */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; xtmp's local layout matches sub_vecs[k] */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* user-supplied remove callbacks cannot be transplanted onto the subcomm */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
291 
292 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
293 {
294   PetscErrorCode ierr;
295   Mat            B;
296 
297   PetscFunctionBegin;
298   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
299   /* Propagate the nullspace if it exists */
300   {
301     MatNullSpace nullspace,sub_nullspace;
302     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
303     if (nullspace) {
304       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
305       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
306       if (PCTelescope_isActiveRank(sred)) {
307         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
308         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
309       }
310     }
311   }
312   /* Propagate the near nullspace if it exists */
313   {
314     MatNullSpace nearnullspace,sub_nearnullspace;
315     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
316     if (nearnullspace) {
317       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
318       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
319       if (PCTelescope_isActiveRank(sred)) {
320         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
321         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
322       }
323     }
324   }
325   PetscFunctionReturn(0);
326 }
327 
328 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
329 {
330   PC_Telescope   sred = (PC_Telescope)pc->data;
331   PetscErrorCode ierr;
332   PetscBool      iascii,isstring;
333   PetscViewer    subviewer;
334 
335   PetscFunctionBegin;
336   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
337   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
338   if (iascii) {
339     {
340       MPI_Comm    comm,subcomm;
341       PetscMPIInt comm_size,subcomm_size;
342       DM          dm = NULL,subdm = NULL;
343 
344       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
345       subdm = private_PCTelescopeGetSubDM(sred);
346 
347       if (sred->psubcomm) {
348         comm = PetscSubcommParent(sred->psubcomm);
349         subcomm = PetscSubcommChild(sred->psubcomm);
350         ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
351         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);
352 
353         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
354         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
355         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
356         switch (sred->subcommtype) {
357         case PETSC_SUBCOMM_INTERLACED :
358           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
359           break;
360         case PETSC_SUBCOMM_CONTIGUOUS :
361           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
362           break;
363         default :
364           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
365         }
366         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
367       } else {
368         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
369         subcomm = sred->subcomm;
370         if (!PCTelescope_isActiveRank(sred)) {
371           subcomm = PETSC_COMM_SELF;
372         }
373 
374         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
375         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
376         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
377       }
378 
379       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
380       if (PCTelescope_isActiveRank(sred)) {
381         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
382 
383         if (dm && sred->ignore_dm) {
384           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
385         }
386         if (sred->ignore_kspcomputeoperators) {
387           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
388         }
389         switch (sred->sr_type) {
390         case TELESCOPE_DEFAULT:
391           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
392           break;
393         case TELESCOPE_DMDA:
394           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
395           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
396           break;
397         case TELESCOPE_DMPLEX:
398           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
399           break;
400         case TELESCOPE_COARSEDM:
401           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
402           break;
403         }
404 
405         if (dm) {
406           PetscObject obj = (PetscObject)dm;
407           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
408           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
409           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
410           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
411           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
412           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
413           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
414         } else {
415           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
416         }
417         if (subdm) {
418           PetscObject obj = (PetscObject)subdm;
419           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
420           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
421           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
422           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
423           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
424           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
425           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
426         } else {
427           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
428         }
429 
430         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
431         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
432       }
433       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
434     }
435   }
436   PetscFunctionReturn(0);
437 }
438 
/*
  Setup/update for PCTelescope. On first setup: determine the setup type
  (default / DMDA / DMPLEX / coarse DM), install the matching function
  pointers, build the sub-communicator, create the redundant KSP and the
  redundant operator. On subsequent setups only the operator is refreshed.
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* the coarse-DM option overrides the detected DM type */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      /* ignore_dm overrides everything: fall back to the default setup */
      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
    break;
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default: SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
    break;
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* default/DMDA: build the subcomm from the reduction factor */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* at least one rank must hold a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRQ(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRQ(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRQ(ierr);
      }

      /* the coarse comm must be strictly smaller than the fine comm */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRQ(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      /* and must be a genuine sub-communicator of the fine comm */
      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP: created once, only on active subcomm ranks */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update: first call creates the redundant operator (and nullspaces), later calls reuse it */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled){
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
601 
/*
  Apply the preconditioner: gather x onto the sub-communicator (x -> xtmp ->
  xred), solve with the redundant KSP (xred -> yred), then scatter the result
  back (yred -> xtmp -> y). All ranks participate in the scatters; only active
  subcomm ranks hold xred/yred and perform the solve.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred; xtmp's local layout matches xred by
     construction (see PCTelescopeSetUp_default), xred is NULL on inactive ranks */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then scatter (reverse) into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
657 
658 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
659 {
660   PC_Telescope      sred = (PC_Telescope)pc->data;
661   PetscErrorCode    ierr;
662   Vec               xtmp,yred;
663   PetscInt          i,st,ed;
664   VecScatter        scatter;
665   const PetscScalar *x_array;
666   PetscBool         default_init_guess_value;
667 
668   PetscFunctionBegin;
669   xtmp    = sred->xtmp;
670   scatter = sred->scatter;
671   yred    = sred->yred;
672 
673   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
674   *reason = (PCRichardsonConvergedReason)0;
675 
676   if (!zeroguess) {
677     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
678     /* pull in vector y->xtmp */
679     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
680     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
681 
682     /* copy vector entries into xred */
683     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
684     if (yred) {
685       PetscScalar *LA_yred;
686       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
687       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
688       for (i=0; i<ed-st; i++) {
689         LA_yred[i] = x_array[i];
690       }
691       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
692     }
693     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
694   }
695 
696   if (PCTelescope_isActiveRank(sred)) {
697     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
698     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
699   }
700 
701   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
702 
703   if (PCTelescope_isActiveRank(sred)) {
704     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
705   }
706 
707   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
708   *outits = 1;
709   PetscFunctionReturn(0);
710 }
711 
712 static PetscErrorCode PCReset_Telescope(PC pc)
713 {
714   PC_Telescope   sred = (PC_Telescope)pc->data;
715   PetscErrorCode ierr;
716 
717   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
718   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
719   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
720   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
721   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
722   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
723   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
724   if (sred->pctelescope_reset_type) {
725     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
726   }
727   PetscFunctionReturn(0);
728 }
729 
/*
  Destroy: reset first (frees per-setup objects), then destroy the redundant
  KSP, the sub-communicator object, and the PC context itself.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
743 
/*
  Process the -pc_telescope_* options: subcomm type, reduction factor, and
  the ignore-DM / ignore-KSPComputeOperators / use-coarse-DM flags.
*/
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  /* only change the type when the option was explicitly given (the setter
     errors out after setup) */
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr);
  /* a reduction factor larger than the comm size cannot define a subcomm */
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
769 
/* PCTelescope implementation specific APIs */
771 
772 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
773 {
774   PC_Telescope red = (PC_Telescope)pc->data;
775   PetscFunctionBegin;
776   if (ksp) *ksp = red->ksp;
777   PetscFunctionReturn(0);
778 }
779 
780 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
781 {
782   PC_Telescope red = (PC_Telescope)pc->data;
783   PetscFunctionBegin;
784   if (subcommtype) *subcommtype = red->subcommtype;
785   PetscFunctionReturn(0);
786 }
787 
788 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
789 {
790   PC_Telescope     red = (PC_Telescope)pc->data;
791 
792   PetscFunctionBegin;
793   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
794   red->subcommtype = subcommtype;
795   PetscFunctionReturn(0);
796 }
797 
798 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
799 {
800   PC_Telescope red = (PC_Telescope)pc->data;
801   PetscFunctionBegin;
802   if (fact) *fact = red->redfactor;
803   PetscFunctionReturn(0);
804 }
805 
806 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
807 {
808   PC_Telescope     red = (PC_Telescope)pc->data;
809   PetscMPIInt      size;
810   PetscErrorCode   ierr;
811 
812   PetscFunctionBegin;
813   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
814   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
815   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
816   red->redfactor = fact;
817   PetscFunctionReturn(0);
818 }
819 
820 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
821 {
822   PC_Telescope red = (PC_Telescope)pc->data;
823   PetscFunctionBegin;
824   if (v) *v = red->ignore_dm;
825   PetscFunctionReturn(0);
826 }
827 
828 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
829 {
830   PC_Telescope red = (PC_Telescope)pc->data;
831   PetscFunctionBegin;
832   red->ignore_dm = v;
833   PetscFunctionReturn(0);
834 }
835 
836 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
837 {
838   PC_Telescope red = (PC_Telescope)pc->data;
839   PetscFunctionBegin;
840   if (v) *v = red->use_coarse_dm;
841   PetscFunctionReturn(0);
842 }
843 
844 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
845 {
846   PC_Telescope red = (PC_Telescope)pc->data;
847   PetscFunctionBegin;
848   red->use_coarse_dm = v;
849   PetscFunctionReturn(0);
850 }
851 
852 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
853 {
854   PC_Telescope red = (PC_Telescope)pc->data;
855   PetscFunctionBegin;
856   if (v) *v = red->ignore_kspcomputeoperators;
857   PetscFunctionReturn(0);
858 }
859 
860 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
861 {
862   PC_Telescope red = (PC_Telescope)pc->data;
863   PetscFunctionBegin;
864   red->ignore_kspcomputeoperators = v;
865   PetscFunctionReturn(0);
866 }
867 
868 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
869 {
870   PC_Telescope red = (PC_Telescope)pc->data;
871   PetscFunctionBegin;
872   *dm = private_PCTelescopeGetSubDM(red);
873   PetscFunctionReturn(0);
874 }
875 
876 /*@
877  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
878 
879  Not Collective
880 
881  Input Parameter:
882 .  pc - the preconditioner context
883 
884  Output Parameter:
885 .  subksp - the KSP defined the smaller set of processes
886 
887  Level: advanced
888 
889 .keywords: PC, telescopting solve
890 @*/
891 PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
892 {
893   PetscErrorCode ierr;
894   PetscFunctionBegin;
895   ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
896   PetscFunctionReturn(0);
897 }
898 
899 /*@
900  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced by.
901 
902  Not Collective
903 
904  Input Parameter:
905 .  pc - the preconditioner context
906 
907  Output Parameter:
908 .  fact - the reduction factor
909 
910  Level: advanced
911 
912 .keywords: PC, telescoping solve
913 @*/
914 PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
915 {
916   PetscErrorCode ierr;
917   PetscFunctionBegin;
918   ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
919   PetscFunctionReturn(0);
920 }
921 
922 /*@
923  PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes has been reduced by.
924 
925  Not Collective
926 
927  Input Parameter:
928 .  pc - the preconditioner context
929 
930  Output Parameter:
931 .  fact - the reduction factor
932 
933  Level: advanced
934 
935 .keywords: PC, telescoping solve
936 @*/
937 PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
938 {
939   PetscErrorCode ierr;
940   PetscFunctionBegin;
941   ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 /*@
946  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
947 
948  Not Collective
949 
950  Input Parameter:
951 .  pc - the preconditioner context
952 
953  Output Parameter:
954 .  v - the flag
955 
956  Level: advanced
957 
958 .keywords: PC, telescoping solve
959 @*/
960 PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
961 {
962   PetscErrorCode ierr;
963   PetscFunctionBegin;
964   ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
965   PetscFunctionReturn(0);
966 }
967 
968 /*@
969  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
970 
971  Not Collective
972 
973  Input Parameter:
974 .  pc - the preconditioner context
975 
976  Output Parameter:
977 .  v - Use PETSC_TRUE to ignore any DM
978 
979  Level: advanced
980 
981 .keywords: PC, telescoping solve
982 @*/
983 PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
984 {
985   PetscErrorCode ierr;
986   PetscFunctionBegin;
987   ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
988   PetscFunctionReturn(0);
989 }
990 
991 /*@
992  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.
993 
994  Not Collective
995 
996  Input Parameter:
997 .  pc - the preconditioner context
998 
999  Output Parameter:
1000 .  v - the flag
1001 
1002  Level: advanced
1003 
1004 .keywords: PC, telescoping solve
1005 @*/
1006 PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
1007 {
1008   PetscErrorCode ierr;
1009   PetscFunctionBegin;
1010   ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
1011   PetscFunctionReturn(0);
1012 }
1013 
1014 /*@
1015  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
1016 
1017  Not Collective
1018 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
1024 
1025  Notes:
1026  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1027  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1028  -pc_telescope_subcomm_type will no longer have any meaning.
1029  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
 is the same as that of the parent DM.
1032  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1033  This will be checked at the time the preconditioner is setup and an error will occur if
1034  the coarse DM does not define a sub-communicator of that used by the parent DM.
1035 
1036  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1037  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1038 
1039  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1040 
1041  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1042  In the user code, this is achieved via
1043 .vb
1044    {
1045      DM dm_fine;
1046      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1047    }
1048 .ve
1049  The signature of the user provided field scatter method is
1050 .vb
1051    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1052 .ve
1053  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1054  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1055 
1056  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1057  of state variables between the fine and coarse DMs.
1058  In the context of a finite element discretization, an example state variable might be
1059  values associated with quadrature points within each element.
1060  A user provided state scatter method is composed via
1061 .vb
1062    {
1063      DM dm_fine;
1064      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1065    }
1066 .ve
1067  The signature of the user provided state scatter method is
1068 .vb
1069    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1070 .ve
1071  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1072  The user is only required to support mode = SCATTER_FORWARD.
1073  No assumption is made about the data type of the state variables.
1074  These must be managed by the user and must be accessible from the DM.
1075 
1076  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1077  associated with the sub-KSP residing within PCTelescope.
1078  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1079  KSPSetComputeOperators() should be "similar" in type or origin.
1080  Specifically the following rules are used to infer what context on the sub-KSP should be.
1081 
1082  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1083  Note that the special case of a DMSHELL context is queried.
1084 
1085 .vb
1086    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1087    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1088    DMShellGetContext(dm_fine,&dmfine_shellctx);
1089 
1090    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1091    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1092 .ve
1093 
1094  The following rules are then enforced:
1095 
1096  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1097  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1098 
1099  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1100  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1101  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1102 
1103  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1104  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1105  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1106 
1107  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1108  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1109  In this case, an additional mechanism is provided via a composed function which will return
1110  the actual context to be used. To use this feature you must compose the "getter" function
1111  with the coarse DM, e.g.
1112 .vb
1113    {
1114      DM dm_coarse;
1115      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1116    }
1117 .ve
1118  The signature of the user provided method is
1119 .vb
1120    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1121 .ve
1122 
1123  Level: advanced
1124 
1125 .keywords: PC, telescoping solve
1126 @*/
1127 PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
1128 {
1129   PetscErrorCode ierr;
1130   PetscFunctionBegin;
1131   ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
1132   PetscFunctionReturn(0);
1133 }
1134 
1135 /*@
1136  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
1137 
1138  Not Collective
1139 
1140  Input Parameter:
1141 .  pc - the preconditioner context
1142 
1143  Output Parameter:
1144 .  v - the flag
1145 
1146  Level: advanced
1147 
1148 .keywords: PC, telescoping solve
1149 @*/
1150 PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
1151 {
1152   PetscErrorCode ierr;
1153   PetscFunctionBegin;
1154   ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
1155   PetscFunctionReturn(0);
1156 }
1157 
1158 /*@
1159  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
1160 
1161  Not Collective
1162 
1163  Input Parameter:
1164 .  pc - the preconditioner context
1165 
1166  Output Parameter:
1167 .  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
1168 
1169  Level: advanced
1170 
1171 .keywords: PC, telescoping solve
1172 @*/
1173 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
1174 {
1175   PetscErrorCode ierr;
1176   PetscFunctionBegin;
1177   ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
1178   PetscFunctionReturn(0);
1179 }
1180 
1181 /*@
1182  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1183 
1184  Not Collective
1185 
1186  Input Parameter:
1187 .  pc - the preconditioner context
1188 
1189  Output Parameter:
1190 .  subdm - The re-partitioned DM
1191 
1192  Level: advanced
1193 
1194 .keywords: PC, telescoping solve
1195 @*/
1196 PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
1197 {
1198   PetscErrorCode ierr;
1199   PetscFunctionBegin;
1200   ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
1201   PetscFunctionReturn(0);
1202 }
1203 
1204 /*@
1205  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1206 
1207  Logically Collective
1208 
1209  Input Parameter:
1210 +  pc - the preconditioner context
1211 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1212 
1213  Level: advanced
1214 
1215 .keywords: PC, telescoping solve
1216 
1217 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
1218 @*/
1219 PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
1220 {
1221   PetscErrorCode ierr;
1222   PetscFunctionBegin;
1223   ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
1224   PetscFunctionReturn(0);
1225 }
1226 
1227 /*@
1228  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1229 
1230  Not Collective
1231 
1232  Input Parameter:
1233 .  pc - the preconditioner context
1234 
1235  Output Parameter:
1236 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1237 
1238  Level: advanced
1239 
1240 .keywords: PC, telescoping solve
1241 
1242 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
1243 @*/
1244 PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
1245 {
1246   PetscErrorCode ierr;
1247   PetscFunctionBegin;
1248   ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
1249   PetscFunctionReturn(0);
1250 }
1251 
1252 /* -------------------------------------------------------------------------------------*/
1253 /*MC
1254    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1255 
1256    Options Database:
1257 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1258 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1259 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1260 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1261 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1262 
1263    Level: advanced
1264 
1265    Notes:
1266    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1267    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1268    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1269    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1270    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1271    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1272 
1273    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1274 
1275    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1276    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1277    communicators c and c' respectively.
1278 
1279    [1] Default setup
1280    The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1283    No support is provided for KSPSetComputeOperators().
1284    Currently there is no support for the flag -pc_use_amat.
1285 
1286    [2] DM aware setup
1287    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1288    c' is created via PetscSubcommCreate().
1289    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1290    Currently only support for re-partitioning a DMDA is provided.
1291    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1293    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1294    This is fragile since the user must ensure that their user context is valid for use on c'.
1295    Currently there is no support for the flag -pc_use_amat.
1296 
1297    [3] Coarse DM setup
1298    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1299    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1300    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1301    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1302    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1303    available with using multi-grid on unstructured meshes.
1304    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1305    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
   The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1310    Currently there is no support for the flag -pc_use_amat.
1311    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1312    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1313 
1314    Developer Notes:
1315    During PCSetup, the B operator is scattered onto c'.
1316    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1317    Then, KSPSolve() is executed on the c' communicator.
1318 
1319    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1320    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1321 
1322    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1323    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1324    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1325 
1326    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
   support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
   and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DMs (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1330    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1331 
1332    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1333 
1334    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1335    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1336    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1337 
1338    Limitations/improvements include the following.
1339    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1340    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1341 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
1346 
1347    Mapping of vectors (default setup mode) is performed in the following way.
1348    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1349    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1350    We perform the scatter to the sub-communicator in the following way.
1351    [1] Given a vector x defined on communicator c
1352 
1353 .vb
1354    rank(c)  local values of x
1355    ------- ----------------------------------------
1356         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1357         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1358         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1359         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1360 .ve
1361 
1362    scatter into xtmp defined also on comm c, so that we have the following values
1363 
1364 .vb
1365    rank(c)  local values of xtmp
1366    ------- ----------------------------------------------------------------------------
1367         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1368         1   [ ]
1369         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1370         3   [ ]
1371 .ve
1372 
1373    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1374 
1375 
1376    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1377    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1378 
1379 .vb
1380    rank(c')  local values of xred
1381    -------- ----------------------------------------------------------------------------
1382          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1383          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1384 .ve
1385 
1386   Contributed by Dave May
1387 
1388   Reference:
1389   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1390 
1391 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1392 M*/
/* Constructor registered for PCTELESCOPE: allocates the implementation context,
   installs the PC virtual methods, selects the default setup flavour, and composes
   the PCTelescope{Get,Set}* accessors on the PC object. */
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* fields not explicitly initialized below (e.g. sred->ksp) are presumably
     zeroed by PetscNewLog() — TODO confirm against PetscNew() semantics */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* virtual method table; no transpose application is provided */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* default setup flavour; PCSetUp_Telescope may swap these for the DMDA or
     coarse-DM variants depending on what is attached to the PC */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* compose the public accessor implementations so PetscUseMethod/PetscTryMethod can find them */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1437