xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision c5083d92c4ff246cae69ce4f2fa2928232b0a98b)
1 
2 #include <petsc/private/petscimpl.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6 #include <petscdm.h> /*I "petscdm.h" I*/
7 #include "../src/ksp/pc/impls/telescope/telescope.h"
8 
9 static PetscBool  cited = PETSC_FALSE;
10 static const char citation[] =
11 "@inproceedings{MaySananRuppKnepleySmith2016,\n"
12 "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
13 "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
14 "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
15 "  series    = {PASC '16},\n"
16 "  isbn      = {978-1-4503-4126-4},\n"
17 "  location  = {Lausanne, Switzerland},\n"
18 "  pages     = {5:1--5:12},\n"
19 "  articleno = {5},\n"
20 "  numpages  = {12},\n"
21 "  url       = {http://doi.acm.org/10.1145/2929908.2929913},\n"
22 "  doi       = {10.1145/2929908.2929913},\n"
23 "  acmid     = {2929913},\n"
24 "  publisher = {ACM},\n"
25 "  address   = {New York, NY, USA},\n"
26 "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
27 "  year      = {2016}\n"
28 "}\n";
29 
30 /*
31  default setup mode
32 
33  [1a] scatter to (FORWARD)
34  x(comm) -> xtmp(comm)
35  [1b] local copy (to) ranks with color = 0
36  xred(subcomm) <- xtmp
37 
38  [2] solve on sub KSP to obtain yred(subcomm)
39 
40  [3a] local copy (from) ranks with color = 0
41  yred(subcomm) --> xtmp
 42  [3b] scatter from (REVERSE)
43  xtmp(comm) -> y(comm)
44 */
45 
46 PetscBool PetscSubComm_isActiveRank(PetscSubcomm scomm)
47 {
48   if (scomm->color == 0) { return PETSC_TRUE; }
49   else { return PETSC_FALSE; }
50 }
51 
52 PetscBool isActiveRank(PC_Telescope sred)
53 {
54   if (sred->psubcomm) {
55     return(PetscSubComm_isActiveRank(sred->psubcomm));
56   } else {
57     if (sred->subcomm != MPI_COMM_NULL) { return PETSC_TRUE; }
58     else { return PETSC_FALSE; }
59   }
60 }
61 
62 /*
63   Collective on MPI_Comm[comm_f]
64   Notes
65    * Using comm_f = MPI_COMM_NULL will result in an error
66    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
67    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
68 */
69 PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
70 {
71   int valid = 1;
72   MPI_Group group_f,group_c;
73   PetscErrorCode ierr;
74   int errorcode;
75   PetscMPIInt count,k,size_f = 0,size_c = 0,size_c_sum = 0;
76   int *ranks_f = NULL,*ranks_c = NULL;
77 
78   if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");
79 
80   ierr = MPI_Comm_group(comm_f,&group_f);CHKERRQ(ierr);
81   if (comm_c != MPI_COMM_NULL) {
82     ierr = MPI_Comm_group(comm_c,&group_c);CHKERRQ(ierr);
83   }
84 
85   ierr = MPI_Comm_size(comm_f,&size_f);CHKERRQ(ierr);
86   if (comm_c != MPI_COMM_NULL) {
87     ierr = MPI_Comm_size(comm_c,&size_c);CHKERRQ(ierr);
88   }
89 
90   /* check not all comm_c's are NULL */
91   size_c_sum = size_c;
92   ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRQ(ierr);
93   if (size_c_sum == 0) {
94     valid = 0;
95   }
96 
97   /* check we can map at least 1 rank in comm_c to comm_f */
98   ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
99   ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
100   for (k=0; k<size_f; k++) {
101     ranks_f[k] = MPI_UNDEFINED;
102   }
103   for (k=0; k<size_c; k++) {
104     ranks_c[k] = (int)k;
105   }
106 
107   count = 0;
108   if (comm_c != MPI_COMM_NULL) {
109     errorcode = MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
110     for (k=0; k<size_f; k++) {
111       if (ranks_f[k] == MPI_UNDEFINED) {
112         count++;
113       }
114     }
115   }
116   if (count == size_f) {
117     valid = 0;
118   }
119 
120   ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPI_INT,MPI_MIN,comm_f);CHKERRQ(ierr);
121   if (valid == 1) { *isvalid = PETSC_TRUE; }
122   else { *isvalid = PETSC_FALSE; }
123 
124   ierr = PetscFree(ranks_f);CHKERRQ(ierr);
125   ierr = PetscFree(ranks_c);CHKERRQ(ierr);
126   ierr = MPI_Group_free(&group_f);CHKERRQ(ierr);
127   if (comm_c != MPI_COMM_NULL) {
128     ierr = MPI_Group_free(&group_c);CHKERRQ(ierr);
129   }
130   PetscFunctionReturn(0);
131 }
132 
133 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
134 {
135   DM subdm = NULL;
136 
137   if (!isActiveRank(sred)) { subdm = NULL; }
138   else {
139     switch (sred->sr_type) {
140     case TELESCOPE_DEFAULT: subdm = NULL;
141       break;
142     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
143       break;
144     case TELESCOPE_DMPLEX:  subdm = NULL;
145       break;
146     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
147       break;
148     }
149   }
150   return(subdm);
151 }
152 
/*
 Default setup (TELESCOPE_DEFAULT): creates the IS/VecScatter pair used to move
 data between the parent communicator and the sub-communicator, the reduced
 work vectors xred/yred (non-NULL only on active ranks), and the parent-comm
 staging vector xtmp. All objects are stashed in the PC_Telescope context,
 which takes ownership. Collective on the parent communicator.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* global size and block size of the reduced vectors come from the preconditioner operator */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred lives on the sub-communicator; inactive ranks keep xred = NULL and m = 0 */
  xred = NULL;
  m    = 0;
  if (isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  /* yred receives the solution of the reduced solve; active ranks only */
  yred = NULL;
  if (isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp is defined on the parent comm but partitioned like xred (local size m,
     hence 0 on inactive ranks) so xtmp <-> xred copies are purely local */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* index set selecting what this rank receives in the forward scatter:
     the ownership range of xred on active ranks, an empty stride otherwise */
  if (isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreateWithData(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* record everything in the context; x was only needed as a scatter template */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
212 
/*
 Default matrix creation/update: extracts the locally owned rows (sred->isin)
 with all columns into a sequential matrix, then concatenates those pieces into
 the redundant operator Bred on the sub-communicator. On inactive ranks *A is
 set to NULL. Collective on the communicator of pc.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* rows: those assigned to this rank by the scatter; columns: everything */
  isrow = sred->isin;
  ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr);
  /* collective extraction; always MAT_INITIAL_MATRIX since Blocal is destroyed below */
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (isActiveRank(sred)) {
    PetscInt mm;

    /* reuse the previously created redundant operator when updating */
    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
247 
/*
 Map a (near) nullspace defined on the parent communicator onto the
 sub-communicator: each nullspace vector is scattered into sred->xtmp and then
 copied locally into a duplicate of sred->xred. The new MatNullSpace is created
 only on active ranks. Collective on the communicator of pc (the scatter loop
 must be executed by every rank, including inactive ones).
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode   ierr;
  PetscBool        has_const;
  const Vec        *vecs;
  Vec              *sub_vecs = NULL;
  PetscInt         i,k,n = 0;
  MPI_Comm         subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* allocate receiving vectors on active ranks only; sub_vecs stays NULL elsewhere */
  if (isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp (collective: all ranks participate in the scatter) */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; xtmp's local layout matches sub_vecs[k]'s */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object; it copies sub_vecs, so destroy ours */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* custom remove() callbacks cannot be transplanted onto the sub-communicator */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
300 
301 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
302 {
303   PetscErrorCode   ierr;
304   Mat              B;
305 
306   PetscFunctionBegin;
307   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
308 
309   /* Propagate the nullspace if it exists */
310   {
311     MatNullSpace nullspace,sub_nullspace;
312     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
313     if (nullspace) {
314       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
315       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
316       if (isActiveRank(sred)) {
317         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
318         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
319       }
320     }
321   }
322 
323   /* Propagate the near nullspace if it exists */
324   {
325     MatNullSpace nearnullspace,sub_nearnullspace;
326     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
327     if (nearnullspace) {
328       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
329       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
330       if (isActiveRank(sred)) {
331         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
332         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
333       }
334     }
335   }
336   PetscFunctionReturn(0);
337 }
338 
339 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
340 {
341   PC_Telescope     sred = (PC_Telescope)pc->data;
342   PetscErrorCode   ierr;
343   PetscBool        iascii,isstring;
344   PetscViewer      subviewer;
345 
346   PetscFunctionBegin;
347   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
348   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
349   if (iascii) {
350     {
351       MPI_Comm    comm,subcomm;
352       PetscMPIInt comm_size,subcomm_size;
353       DM          dm = NULL,subdm = NULL;
354 
355       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
356       subdm = private_PCTelescopeGetSubDM(sred);
357 
358       if (sred->psubcomm) {
359         comm = PetscSubcommParent(sred->psubcomm);
360         subcomm = PetscSubcommChild(sred->psubcomm);
361         ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
362         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);
363 
364         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
365         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
366         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
367         switch (sred->subcommtype) {
368           case PETSC_SUBCOMM_INTERLACED :
369             ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
370             break;
371           case PETSC_SUBCOMM_CONTIGUOUS :
372             ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
373             break;
374           default :
375             SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
376         }
377         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
378       } else {
379         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
380         subcomm = sred->subcomm;
381         if (!isActiveRank(sred)) {
382           subcomm = PETSC_COMM_SELF;
383         }
384 
385         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
386         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
387         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
388       }
389 
390       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
391       if (isActiveRank(sred)) {
392         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
393 
394         if (dm && sred->ignore_dm) {
395           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
396         }
397         if (sred->ignore_kspcomputeoperators) {
398           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
399         }
400         switch (sred->sr_type) {
401         case TELESCOPE_DEFAULT:
402           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
403           break;
404         case TELESCOPE_DMDA:
405           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
406           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
407           break;
408         case TELESCOPE_DMPLEX:
409           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
410           break;
411         case TELESCOPE_COARSEDM:
412           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
413           break;
414         }
415 
416         if (dm) {
417           PetscObject obj = (PetscObject)dm;
418           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
419           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
420           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
421           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
422           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
423           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
424           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
425         } else {
426           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
427         }
428         if (subdm) {
429           PetscObject obj = (PetscObject)subdm;
430           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
431           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
432           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
433           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
434           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
435           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
436           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
437         } else {
438           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
439         }
440 
441         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
442         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
443       }
444       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
445     }
446   }
447   PetscFunctionReturn(0);
448 }
449 
/*
 Setup/update for PCTelescope. On first call: classifies the setup mode from
 the attached DM (and options), installs the mode-specific function pointers,
 builds the sub-communicator, and creates the inner KSP on active ranks. On
 every call: runs the mode's setup, creates or reuses the redundant operator,
 and hands it to the inner KSP.
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  MPI_Comm          comm,subcomm=0;
  PCTelescopeType   sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* user options override the DM-type detection above */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    /* subsequent setups reuse the mode chosen on the first call */
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
    case TELESCOPE_DEFAULT:
      sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
      sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
      sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
      sred->pctelescope_reset_type              = NULL;
      break;
    case TELESCOPE_DMDA:
      pc->ops->apply                            = PCApply_Telescope_dmda;
      pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
      sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
      sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
      sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
      sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
      break;
    case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
      break;
    case TELESCOPE_COARSEDM:
      pc->ops->apply                            = PCApply_Telescope_CoarseDM;
      pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
      sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
      sred->pctelescope_matcreate_type          = NULL;
      sred->pctelescope_matnullspacecreate_type = NULL;/*PCTelescopeMatNullSpaceCreate_CoarseDM;*/
      sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
      break;
    default: SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
      break;
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* PCTelescope creates its own subcomm from the reduction factor and type */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* at least one rank must hold a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRQ(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRQ(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRQ(ierr);
      }

      /* the coarse comm must be strictly smaller than the fine comm */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRQ(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      /* and its ranks must map into the fine comm */
      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP: created once, on active ranks only, with prefix "<pc prefix>telescope_" */
  if (!pc->setupcalled) {
    const char *prefix;

    if (isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update: initial creation (plus nullspace propagation) vs. in-place reuse */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled){
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }

#if 0
  /* we perform this last as Bred is not available with KSPSetComputeOperators() until KSPSetUp has been called */
  if (!pc->setupcalled) {
    if (isActiveRank(sred)) {
      ierr = KSPSetUp(sred->ksp);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  }
#endif

  PetscFunctionReturn(0);
}
625 
/*
 Apply the preconditioner: y = M^{-1} x, following the scheme documented at the
 top of this file: scatter x -> xtmp on the parent comm, copy locally into
 xred, solve on the sub-communicator, copy yred back into xtmp, and scatter in
 reverse into y. Collective on the communicator of pc; inactive ranks have
 xred == yred == NULL and simply participate in the scatters.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred (local: xtmp's layout matches xred's by construction) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve on the sub-communicator (active ranks only) */
  if (isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: local copy yred -> xtmp, then reverse scatter xtmp -> y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
681 
682 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
683 {
684   PC_Telescope      sred = (PC_Telescope)pc->data;
685   PetscErrorCode    ierr;
686   Vec               xtmp,yred;
687   PetscInt          i,st,ed;
688   VecScatter        scatter;
689   const PetscScalar *x_array;
690   PetscBool         default_init_guess_value;
691 
692   PetscFunctionBegin;
693   xtmp    = sred->xtmp;
694   scatter = sred->scatter;
695   yred    = sred->yred;
696 
697   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
698   *reason = (PCRichardsonConvergedReason)0;
699 
700   if (!zeroguess) {
701     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
702     /* pull in vector y->xtmp */
703     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
704     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
705 
706     /* copy vector entries into xred */
707     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
708     if (yred) {
709       PetscScalar *LA_yred;
710       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
711       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
712       for (i=0; i<ed-st; i++) {
713         LA_yred[i] = x_array[i];
714       }
715       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
716     }
717     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
718   }
719 
720   if (isActiveRank(sred)) {
721     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
722     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
723   }
724 
725   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
726 
727   if (isActiveRank(sred)) {
728     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
729   }
730 
731   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
732   *outits = 1;
733   PetscFunctionReturn(0);
734 }
735 
736 static PetscErrorCode PCReset_Telescope(PC pc)
737 {
738   PC_Telescope   sred = (PC_Telescope)pc->data;
739   PetscErrorCode ierr;
740 
741   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
742   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
743   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
744   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
745   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
746   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
747   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
748   if (sred->pctelescope_reset_type) {
749     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
750   }
751   PetscFunctionReturn(0);
752 }
753 
/*
 Destroy the PC: reset first to release setup-created objects, then destroy the
 persistent ones. pc->data (the PC_Telescope context itself) is freed last
 since sred aliases it.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
767 
/*
 Process the -pc_telescope_* command line options: subcomm type, reduction
 factor (validated against the comm size), and the ignore-DM,
 ignore-KSPComputeOperators and use-coarse-DM flags.
*/
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  /* only override the subcomm type when the option was actually supplied */
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr);
  /* a reduction factor larger than the comm size cannot yield a valid subcomm */
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
793 
/* PC implementation specific APIs */
795 
796 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
797 {
798   PC_Telescope red = (PC_Telescope)pc->data;
799   PetscFunctionBegin;
800   if (ksp) *ksp = red->ksp;
801   PetscFunctionReturn(0);
802 }
803 
804 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
805 {
806   PC_Telescope red = (PC_Telescope)pc->data;
807   PetscFunctionBegin;
808   if (subcommtype) *subcommtype = red->subcommtype;
809   PetscFunctionReturn(0);
810 }
811 
812 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
813 {
814   PC_Telescope     red = (PC_Telescope)pc->data;
815 
816   PetscFunctionBegin;
817   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
818   red->subcommtype = subcommtype;
819   PetscFunctionReturn(0);
820 }
821 
822 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
823 {
824   PC_Telescope red = (PC_Telescope)pc->data;
825   PetscFunctionBegin;
826   if (fact) *fact = red->redfactor;
827   PetscFunctionReturn(0);
828 }
829 
830 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
831 {
832   PC_Telescope     red = (PC_Telescope)pc->data;
833   PetscMPIInt      size;
834   PetscErrorCode   ierr;
835 
836   PetscFunctionBegin;
837   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
838   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
839   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
840   red->redfactor = fact;
841   PetscFunctionReturn(0);
842 }
843 
844 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
845 {
846   PC_Telescope red = (PC_Telescope)pc->data;
847   PetscFunctionBegin;
848   if (v) *v = red->ignore_dm;
849   PetscFunctionReturn(0);
850 }
851 
852 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
853 {
854   PC_Telescope red = (PC_Telescope)pc->data;
855   PetscFunctionBegin;
856   red->ignore_dm = v;
857   PetscFunctionReturn(0);
858 }
859 
860 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
861 {
862   PC_Telescope red = (PC_Telescope)pc->data;
863   PetscFunctionBegin;
864   if (v) *v = red->use_coarse_dm;
865   PetscFunctionReturn(0);
866 }
867 
868 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
869 {
870   PC_Telescope red = (PC_Telescope)pc->data;
871   PetscFunctionBegin;
872   red->use_coarse_dm = v;
873   PetscFunctionReturn(0);
874 }
875 
876 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
877 {
878   PC_Telescope red = (PC_Telescope)pc->data;
879   PetscFunctionBegin;
880   if (v) *v = red->ignore_kspcomputeoperators;
881   PetscFunctionReturn(0);
882 }
883 
884 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
885 {
886   PC_Telescope red = (PC_Telescope)pc->data;
887   PetscFunctionBegin;
888   red->ignore_kspcomputeoperators = v;
889   PetscFunctionReturn(0);
890 }
891 
892 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
893 {
894   PC_Telescope red = (PC_Telescope)pc->data;
895   PetscFunctionBegin;
896   *dm = private_PCTelescopeGetSubDM(red);
897   PetscFunctionReturn(0);
898 }
899 
/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
922 
/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
945 
/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor; must satisfy 0 < fact <= size of the communicator of pc

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
968 
/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
991 
/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1014 
/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1037 
/*@
 PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to query the attached DM for a coarse DM and build the sub-communicator from it

 Notes:
 When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
 will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
 -pc_telescope_subcomm_type will no longer have any meaning.
 It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
 is the same as that of the parent DM.
 Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
 This will be checked at the time the preconditioner is setup and an error will occur if
 the coarse DM does not define a sub-communicator of that used by the parent DM.

 The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
 the DM used (e.g. it supports DMSHELL, DMPLEX, etc).

 Support is currently only provided for the case when you are using KSPSetComputeOperators()

 The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
 In the user code, this is achieved via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   }
.ve
 The signature of the user provided field scatter method is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
.ve
 The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
 SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.

 Optionally, the user may also compose a function with the parent DM to facilitate the transfer
 of state variables between the fine and coarse DMs.
 In the context of a finite element discretization, an example state variable might be
 values associated with quadrature points within each element.
 A user provided state scatter method is composed via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
   }
.ve
 The signature of the user provided state scatter method is
.vb
   PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
.ve
 SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
 The user is only required to support mode = SCATTER_FORWARD.
 No assumption is made about the data type of the state variables.
 These must be managed by the user and must be accessible from the DM.

 Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
 associated with the sub-KSP residing within PCTelescope.
 In general, PCTelescope assumes that the context on the fine and coarse DM used with
 KSPSetComputeOperators() should be "similar" in type or origin.
 Specifically the following rules are used to infer what context on the sub-KSP should be.

 First the contexts from the KSP and the fine and coarse DMs are retrieved.
 Note that the special case of a DMSHELL context is queried.

.vb
   DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
   DMGetApplicationContext(dm_fine,&dmfine_appctx);
   DMShellGetContext(dm_fine,&dmfine_shellctx);

   DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
   DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
.ve

 The following rules are then enforced:

 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);

 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
 check that dmcoarse_appctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
 check that dmcoarse_shellctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

 If neither of the above three tests passed, then PCTelescope cannot safely determine what
 context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
 In this case, an additional mechanism is provided via a composed function which will return
 the actual context to be used. To use this feature you must compose the "getter" function
 with the coarse DM, e.g.
.vb
   {
     DM dm_coarse;
     PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
   }
.ve
 The signature of the user provided method is
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
.ve

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1158 
/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1181 
/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1204 
/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

 Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1227 
/*@
 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

 Notes:
 This must be called before the preconditioner is set up; an error is raised otherwise.

 Level: advanced

.keywords: PC, telescoping solve

.seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1250 
/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.keywords: PC, telescoping solve

.seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1275 
1276 /* -------------------------------------------------------------------------------------*/
1277 /*MC
1278    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1279 
1280    Options Database:
1281 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1282 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1283 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1284 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1285 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1286 
1287    Level: advanced
1288 
1289    Notes:
1290    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1291    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1292    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1293    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1294    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1295    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1296 
1297    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1298 
1299    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1300    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1301    communicators c and c' respectively.
1302 
1303    [1] Default setup
1304    The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1307    No support is provided for KSPSetComputeOperators().
1308    Currently there is no support for the flag -pc_use_amat.
1309 
1310    [2] DM aware setup
1311    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1312    c' is created via PetscSubcommCreate().
1313    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1314    Currently only support for re-partitioning a DMDA is provided.
1315    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1317    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1318    This is fragile since the user must ensure that their user context is valid for use on c'.
1319    Currently there is no support for the flag -pc_use_amat.
1320 
1321    [3] Coarse DM setup
1322    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1323    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1324    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1325    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1326    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1327    available with using multi-grid on unstructured meshes.
1328    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1329    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1331    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1332    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1334    Currently there is no support for the flag -pc_use_amat.
1335    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1336    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1337 
1338    Developer Notes:
1339    During PCSetup, the B operator is scattered onto c'.
1340    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1341    Then, KSPSolve() is executed on the c' communicator.
1342 
1343    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1344    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1345 
1346    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1347    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1348    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1349 
1350    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
   support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
   and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1354    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1355 
1356    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1357 
1358    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1359    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1360    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1361 
1362    Limitations/improvements include the following.
1363    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1364    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1365 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
1370 
1371    Mapping of vectors (default setup mode) is performed in the following way.
1372    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1373    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1374    We perform the scatter to the sub-communicator in the following way.
1375    [1] Given a vector x defined on communicator c
1376 
1377 .vb
1378    rank(c)  local values of x
1379    ------- ----------------------------------------
1380         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1381         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1382         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1383         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1384 .ve
1385 
1386    scatter into xtmp defined also on comm c, so that we have the following values
1387 
1388 .vb
1389    rank(c)  local values of xtmp
1390    ------- ----------------------------------------------------------------------------
1391         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1392         1   [ ]
1393         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1394         3   [ ]
1395 .ve
1396 
1397    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1398 
1399 
1400    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1401    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1402 
1403 .vb
1404    rank(c')  local values of xred
1405    -------- ----------------------------------------------------------------------------
1406          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1407          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1408 .ve
1409 
1410   Contributed by Dave May
1411 
1412   Reference:
1413   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1414 
1415 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1416 M*/
/* Create a PCTELESCOPE preconditioner: allocate the implementation context,
   install default options, wire up the PC operations table and register the
   PCTelescope* query/set methods. */
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Default state: interlaced subcomm, no reduction, DM and
     KSPSetComputeOperators() honoured, coarse-DM mode off */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* PC operations table; applytranspose is not supported */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Setup-mode hooks; PCSetUp_Telescope may swap these for the DM or
     coarse-DM variants. No mode-specific reset is needed by default. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Register the composed methods backing the public PCTelescope* API */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1461