xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision c3b5f7ba6bc5ce25a01a67bb37ba5d34b02bbbd7)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
/* Citation registered once per run by PCApply_Telescope() via PetscCitationsRegister() */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
28 
29 /*
30  default setup mode
31 
32  [1a] scatter to (FORWARD)
33  x(comm) -> xtmp(comm)
34  [1b] local copy (to) ranks with color = 0
35  xred(subcomm) <- xtmp
36 
37  [2] solve on sub KSP to obtain yred(subcomm)
38 
39  [3a] local copy (from) ranks with color = 0
40  yred(subcomm) --> xtmp
41  [3b] scatter from (REVERSE)
42  xtmp(comm) -> y(comm)
43 */
44 
45 /*
46   Collective[comm_f]
47   Notes
48    * Using comm_f = MPI_COMM_NULL will result in an error
49    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
50    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
51 */
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1;  /* local vote; reduced with MPI_MIN below, so any rank voting 0 invalidates */
  MPI_Group      group_f,group_c;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  PetscCheckFalse(comm_f == MPI_COMM_NULL,PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  PetscCallMPI(MPI_Comm_group(comm_f,&group_f));
  if (comm_c != MPI_COMM_NULL) {
    PetscCallMPI(MPI_Comm_group(comm_c,&group_c));
  }

  /* size_c stays 0 on ranks where comm_c is MPI_COMM_NULL */
  PetscCallMPI(MPI_Comm_size(comm_f,&size_f));
  if (comm_c != MPI_COMM_NULL) {
    PetscCallMPI(MPI_Comm_size(comm_c,&size_c));
  }

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f,&ranks_f));
  PetscCall(PetscMalloc1(size_c,&ranks_c));
  /* pre-fill ranks_f with MPI_UNDEFINED so untranslated slots remain detectable below */
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    /* count slots that were never filled in by the translation */
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* every slot undefined => no rank of comm_c maps into comm_f */
  if (count == size_f) valid = 0;

  /* collective verdict over comm_f */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) {
    PetscCallMPI(MPI_Group_free(&group_c));
  }
  PetscFunctionReturn(0);
}
113 
114 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
115 {
116   DM subdm = NULL;
117 
118   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
119   else {
120     switch (sred->sr_type) {
121     case TELESCOPE_DEFAULT: subdm = NULL;
122       break;
123     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
124       break;
125     case TELESCOPE_DMPLEX:  subdm = NULL;
126       break;
127     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
128       break;
129     }
130   }
131   return(subdm);
132 }
133 
134 PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
135 {
136   PetscInt       m,M,bs,st,ed;
137   Vec            x,xred,yred,xtmp;
138   Mat            B;
139   MPI_Comm       comm,subcomm;
140   VecScatter     scatter;
141   IS             isin;
142   VecType        vectype;
143 
144   PetscFunctionBegin;
145   PetscCall(PetscInfo(pc,"PCTelescope: setup (default)\n"));
146   comm = PetscSubcommParent(sred->psubcomm);
147   subcomm = PetscSubcommChild(sred->psubcomm);
148 
149   PetscCall(PCGetOperators(pc,NULL,&B));
150   PetscCall(MatGetSize(B,&M,NULL));
151   PetscCall(MatGetBlockSize(B,&bs));
152   PetscCall(MatCreateVecs(B,&x,NULL));
153   PetscCall(MatGetVecType(B,&vectype));
154 
155   xred = NULL;
156   m    = 0;
157   if (PCTelescope_isActiveRank(sred)) {
158     PetscCall(VecCreate(subcomm,&xred));
159     PetscCall(VecSetSizes(xred,PETSC_DECIDE,M));
160     PetscCall(VecSetBlockSize(xred,bs));
161     PetscCall(VecSetType(xred,vectype)); /* Use the preconditioner matrix's vectype by default */
162     PetscCall(VecSetFromOptions(xred));
163     PetscCall(VecGetLocalSize(xred,&m));
164   }
165 
166   yred = NULL;
167   if (PCTelescope_isActiveRank(sred)) {
168     PetscCall(VecDuplicate(xred,&yred));
169   }
170 
171   PetscCall(VecCreate(comm,&xtmp));
172   PetscCall(VecSetSizes(xtmp,m,PETSC_DECIDE));
173   PetscCall(VecSetBlockSize(xtmp,bs));
174   PetscCall(VecSetType(xtmp,vectype));
175 
176   if (PCTelescope_isActiveRank(sred)) {
177     PetscCall(VecGetOwnershipRange(xred,&st,&ed));
178     PetscCall(ISCreateStride(comm,(ed-st),st,1,&isin));
179   } else {
180     PetscCall(VecGetOwnershipRange(x,&st,&ed));
181     PetscCall(ISCreateStride(comm,0,st,1,&isin));
182   }
183   PetscCall(ISSetBlockSize(isin,bs));
184 
185   PetscCall(VecScatterCreate(x,isin,xtmp,NULL,&scatter));
186 
187   sred->isin    = isin;
188   sred->scatter = scatter;
189   sred->xred    = xred;
190   sred->yred    = yred;
191   sred->xtmp    = xtmp;
192   PetscCall(VecDestroy(&x));
193   PetscFunctionReturn(0);
194 }
195 
196 PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
197 {
198   MPI_Comm       comm,subcomm;
199   Mat            Bred,B;
200   PetscInt       nr,nc,bs;
201   IS             isrow,iscol;
202   Mat            Blocal,*_Blocal;
203 
204   PetscFunctionBegin;
205   PetscCall(PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n"));
206   PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
207   subcomm = PetscSubcommChild(sred->psubcomm);
208   PetscCall(PCGetOperators(pc,NULL,&B));
209   PetscCall(MatGetSize(B,&nr,&nc));
210   isrow = sred->isin;
211   PetscCall(ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol));
212   PetscCall(ISSetIdentity(iscol));
213   PetscCall(MatGetBlockSizes(B,NULL,&bs));
214   PetscCall(ISSetBlockSize(iscol,bs));
215   PetscCall(MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
216   PetscCall(MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal));
217   Blocal = *_Blocal;
218   PetscCall(PetscFree(_Blocal));
219   Bred = NULL;
220   if (PCTelescope_isActiveRank(sred)) {
221     PetscInt mm;
222 
223     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
224 
225     PetscCall(MatGetSize(Blocal,&mm,NULL));
226     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred));
227   }
228   *A = Bred;
229   PetscCall(ISDestroy(&iscol));
230   PetscCall(MatDestroy(&Blocal));
231   PetscFunctionReturn(0);
232 }
233 
/* Rebuild a (near) nullspace on the sub-communicator by scattering each basis
   vector of the parent nullspace into the reduced layout.
   *sub_nullspace is only written on active ranks; callers must guard its use. */
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      PetscCall(VecDuplicateVecs(sred->xred,n,&sub_vecs));
    }
  }

  /* copy entries */
  /* the scatter is collective on the parent comm, so ALL ranks execute the loop */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      /* xtmp's local layout matches the sub vector's, so a direct array copy suffices */
      PetscCall(VecGetArrayRead(sred->xtmp,&x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k],&st,&ed));
        PetscCall(VecGetArray(sub_vecs[k],&LA_sub_vec));
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        PetscCall(VecRestoreArray(sub_vecs[k],&LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp,&x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace));
    PetscCall(VecDestroyVecs(n,&sub_vecs));
    /* custom remove callbacks cannot be transplanted onto the sub-communicator */
    PetscCheck(!nullspace->remove,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
285 
286 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
287 {
288   Mat            B;
289 
290   PetscFunctionBegin;
291   PetscCall(PCGetOperators(pc,NULL,&B));
292   /* Propagate the nullspace if it exists */
293   {
294     MatNullSpace nullspace,sub_nullspace;
295     PetscCall(MatGetNullSpace(B,&nullspace));
296     if (nullspace) {
297       PetscCall(PetscInfo(pc,"PCTelescope: generating nullspace (default)\n"));
298       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace));
299       if (PCTelescope_isActiveRank(sred)) {
300         PetscCall(MatSetNullSpace(sub_mat,sub_nullspace));
301         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
302       }
303     }
304   }
305   /* Propagate the near nullspace if it exists */
306   {
307     MatNullSpace nearnullspace,sub_nearnullspace;
308     PetscCall(MatGetNearNullSpace(B,&nearnullspace));
309     if (nearnullspace) {
310       PetscCall(PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n"));
311       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace));
312       if (PCTelescope_isActiveRank(sred)) {
313         PetscCall(MatSetNearNullSpace(sub_mat,sub_nearnullspace));
314         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
315       }
316     }
317   }
318   PetscFunctionReturn(0);
319 }
320 
321 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
322 {
323   PC_Telescope   sred = (PC_Telescope)pc->data;
324   PetscBool      iascii,isstring;
325   PetscViewer    subviewer;
326 
327   PetscFunctionBegin;
328   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
329   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
330   if (iascii) {
331     {
332       MPI_Comm    comm,subcomm;
333       PetscMPIInt comm_size,subcomm_size;
334       DM          dm = NULL,subdm = NULL;
335 
336       PetscCall(PCGetDM(pc,&dm));
337       subdm = private_PCTelescopeGetSubDM(sred);
338 
339       if (sred->psubcomm) {
340         comm = PetscSubcommParent(sred->psubcomm);
341         subcomm = PetscSubcommChild(sred->psubcomm);
342         PetscCallMPI(MPI_Comm_size(comm,&comm_size));
343         PetscCallMPI(MPI_Comm_size(subcomm,&subcomm_size));
344 
345         PetscCall(PetscViewerASCIIPushTab(viewer));
346         PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor));
347         PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size));
348         switch (sred->subcommtype) {
349         case PETSC_SUBCOMM_INTERLACED :
350           PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype));
351           break;
352         case PETSC_SUBCOMM_CONTIGUOUS :
353           PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype));
354           break;
355         default :
356           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
357         }
358         PetscCall(PetscViewerASCIIPopTab(viewer));
359       } else {
360         PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
361         subcomm = sred->subcomm;
362         if (!PCTelescope_isActiveRank(sred)) {
363           subcomm = PETSC_COMM_SELF;
364         }
365 
366         PetscCall(PetscViewerASCIIPushTab(viewer));
367         PetscCall(PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n"));
368         PetscCall(PetscViewerASCIIPopTab(viewer));
369       }
370 
371       PetscCall(PetscViewerGetSubViewer(viewer,subcomm,&subviewer));
372       if (PCTelescope_isActiveRank(sred)) {
373         PetscCall(PetscViewerASCIIPushTab(subviewer));
374 
375         if (dm && sred->ignore_dm) {
376           PetscCall(PetscViewerASCIIPrintf(subviewer,"ignoring DM\n"));
377         }
378         if (sred->ignore_kspcomputeoperators) {
379           PetscCall(PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n"));
380         }
381         switch (sred->sr_type) {
382         case TELESCOPE_DEFAULT:
383           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: default\n"));
384           break;
385         case TELESCOPE_DMDA:
386           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n"));
387           PetscCall(DMView_DA_Short(subdm,subviewer));
388           break;
389         case TELESCOPE_DMPLEX:
390           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n"));
391           break;
392         case TELESCOPE_COARSEDM:
393           PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n"));
394           break;
395         }
396 
397         if (dm) {
398           PetscObject obj = (PetscObject)dm;
399           PetscCall(PetscViewerASCIIPrintf(subviewer,"Parent DM object:"));
400           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
401           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
402           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
403           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
404           PetscCall(PetscViewerASCIIPrintf(subviewer,"\n"));
405           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
406         } else {
407           PetscCall(PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n"));
408         }
409         if (subdm) {
410           PetscObject obj = (PetscObject)subdm;
411           PetscCall(PetscViewerASCIIPrintf(subviewer,"Sub DM object:"));
412           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
413           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
414           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
415           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
416           PetscCall(PetscViewerASCIIPrintf(subviewer,"\n"));
417           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
418         } else {
419           PetscCall(PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n"));
420         }
421 
422         PetscCall(KSPView(sred->ksp,subviewer));
423         PetscCall(PetscViewerASCIIPopTab(subviewer));
424       }
425       PetscCall(PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer));
426     }
427   }
428   PetscFunctionReturn(0);
429 }
430 
/* Main setup for PCTELESCOPE:
   (1) classify the setup mode from any attached DM (first call only),
   (2) bind the mode-specific function pointers,
   (3) create the sub-communicator (PetscSubcomm or coarse-DM derived),
   (4) create the inner KSP on active ranks,
   (5) build/update the redundant operator and hand it to the inner KSP. */
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    PetscCall(PCGetDM(pc,&dm));
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm,DMDA,&same));
      if (same) {
        PetscCall(PetscInfo(pc,"PCTelescope: found DMDA\n"));
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same));
      if (same) {
        PetscCall(PetscInfo(pc,"PCTelescope: found DMPLEX\n"));
        sr_type = TELESCOPE_DMPLEX;
      }

      /* user options override the DM-derived classification */
      if (sred->use_coarse_dm) {
        PetscCall(PetscInfo(pc,"PCTelescope: using coarse DM\n"));
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        PetscCall(PetscInfo(pc,"PCTelescope: ignoring DM\n"));
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        PetscCall(PetscSubcommCreate(comm,&sred->psubcomm));
        PetscCall(PetscSubcommSetNumber(sred->psubcomm,sred->redfactor));
        PetscCall(PetscSubcommSetType(sred->psubcomm,sred->subcommtype));
        PetscCall(PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm)));
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      PetscCall(PCGetDM(pc,&dm));
      comm_fine = PetscObjectComm((PetscObject)dm);
      PetscCall(DMGetCoarseDM(dm,&dm_coarse_partition));
      if (dm_coarse_partition) { cnt = 1; }
      /* at least one rank must carry a coarse DM */
      PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine));
      PetscCheckFalse(cnt == 0,comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      PetscCallMPI(MPI_Comm_size(comm_fine,&csize_fine));
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        PetscCallMPI(MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition));
      }

      /* the coarse comm must be strictly smaller than the fine comm */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      PetscCallMPI(MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine));
      PetscCheckFalse(csg[0] == csg[1],comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PetscCall(PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm));
      PetscCheck(isvalidsubcomm,comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      PetscCall(KSPCreate(subcomm,&sred->ksp));
      PetscCall(KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1));
      PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp));
      PetscCall(KSPSetType(sred->ksp,KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc,&prefix));
      PetscCall(KSPSetOptionsPrefix(sred->ksp,prefix));
      PetscCall(KSPAppendOptionsPrefix(sred->ksp,"telescope_"));
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    PetscCall(sred->pctelescope_setup_type(pc,sred));
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      PetscCall(sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred));
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      PetscCall(sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred));
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      PetscCall(sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred));
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetOperators(sred->ksp,sred->Bred,sred->Bred));
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      PetscCall(KSPSetFromOptions(sred->ksp));
    }
  }
  PetscFunctionReturn(0);
}
592 
/* Apply the telescope PC (default mode):
   scatter x into xtmp, copy into xred, solve on the sub-communicator,
   copy yred back into xtmp, reverse-scatter into y. */
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation,&cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD));

  /* copy vector entries into xred */
  /* xred is non-NULL only on active ranks; layouts match so a direct copy works */
  PetscCall(VecGetArrayRead(xtmp,&x_array));
  if (xred) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xred,&st,&ed));
    PetscCall(VecGetArray(xred,&LA_xred));
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    PetscCall(VecRestoreArray(xred,&LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp,&x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp,xred,yred));
    PetscCall(KSPCheckSolve(sred->ksp,pc,yred));
  }
  /* return vector */
  /* copy the sub-solution back into xtmp, then reverse-scatter to y */
  PetscCall(VecGetArray(xtmp,&array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred,&st,&ed));
    PetscCall(VecGetArrayRead(yred,&LA_yred));
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    PetscCall(VecRestoreArrayRead(yred,&LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp,&array));
  PetscCall(VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE));
  PetscFunctionReturn(0);
}
647 
/* One Richardson iteration: optionally seed the sub-solver's initial guess from y,
   then delegate to PCApply_Telescope. Only max_it = 1 is supported. */
static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value;  /* saved so the inner KSP flag can be restored */

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheckFalse(its > 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD));

    /* copy vector entries into xred */
    /* yred serves as the initial guess for the sub-solve inside PCApply_Telescope */
    PetscCall(VecGetArrayRead(xtmp,&x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred,&st,&ed));
      PetscCall(VecGetArray(yred,&LA_yred));
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      PetscCall(VecRestoreArray(yred,&LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp,&x_array));
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* remember the inner KSP's flag; force nonzero guess when seeded above */
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc,x,y));

  if (PCTelescope_isActiveRank(sred)) {
    /* restore the inner KSP's original initial-guess flag */
    PetscCall(KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value));
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}
700 
701 static PetscErrorCode PCReset_Telescope(PC pc)
702 {
703   PC_Telescope   sred = (PC_Telescope)pc->data;
704 
705   PetscFunctionBegin;
706   PetscCall(ISDestroy(&sred->isin));
707   PetscCall(VecScatterDestroy(&sred->scatter));
708   PetscCall(VecDestroy(&sred->xred));
709   PetscCall(VecDestroy(&sred->yred));
710   PetscCall(VecDestroy(&sred->xtmp));
711   PetscCall(MatDestroy(&sred->Bred));
712   PetscCall(KSPReset(sred->ksp));
713   if (sred->pctelescope_reset_type) {
714     PetscCall(sred->pctelescope_reset_type(pc));
715   }
716   PetscFunctionReturn(0);
717 }
718 
719 static PetscErrorCode PCDestroy_Telescope(PC pc)
720 {
721   PC_Telescope   sred = (PC_Telescope)pc->data;
722 
723   PetscFunctionBegin;
724   PetscCall(PCReset_Telescope(pc));
725   PetscCall(KSPDestroy(&sred->ksp));
726   PetscCall(PetscSubcommDestroy(&sred->psubcomm));
727   PetscCall(PetscFree(sred->dm_ctx));
728   PetscCall(PetscFree(pc->data));
729   PetscFunctionReturn(0);
730 }
731 
732 static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
733 {
734   PC_Telescope     sred = (PC_Telescope)pc->data;
735   MPI_Comm         comm;
736   PetscMPIInt      size;
737   PetscBool        flg;
738   PetscSubcommType subcommtype;
739 
740   PetscFunctionBegin;
741   PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
742   PetscCallMPI(MPI_Comm_size(comm,&size));
743   PetscCall(PetscOptionsHead(PetscOptionsObject,"Telescope options"));
744   PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg));
745   if (flg) {
746     PetscCall(PCTelescopeSetSubcommType(pc,subcommtype));
747   }
748   PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL));
749   PetscCheckFalse(sred->redfactor > size,comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
750   PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL));
751   PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL));
752   PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL));
753   PetscCall(PetscOptionsTail());
754   PetscFunctionReturn(0);
755 }
756 
757 /* PC implementation specific APIs */
758 
759 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
760 {
761   PC_Telescope red = (PC_Telescope)pc->data;
762   PetscFunctionBegin;
763   if (ksp) *ksp = red->ksp;
764   PetscFunctionReturn(0);
765 }
766 
767 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
768 {
769   PC_Telescope red = (PC_Telescope)pc->data;
770   PetscFunctionBegin;
771   if (subcommtype) *subcommtype = red->subcommtype;
772   PetscFunctionReturn(0);
773 }
774 
775 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
776 {
777   PC_Telescope     red = (PC_Telescope)pc->data;
778 
779   PetscFunctionBegin;
780   PetscCheck(!pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
781   red->subcommtype = subcommtype;
782   PetscFunctionReturn(0);
783 }
784 
785 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
786 {
787   PC_Telescope red = (PC_Telescope)pc->data;
788   PetscFunctionBegin;
789   if (fact) *fact = red->redfactor;
790   PetscFunctionReturn(0);
791 }
792 
793 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
794 {
795   PC_Telescope     red = (PC_Telescope)pc->data;
796   PetscMPIInt      size;
797 
798   PetscFunctionBegin;
799   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
800   PetscCheckFalse(fact <= 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
801   PetscCheckFalse(fact > size,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
802   red->redfactor = fact;
803   PetscFunctionReturn(0);
804 }
805 
806 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
807 {
808   PC_Telescope red = (PC_Telescope)pc->data;
809   PetscFunctionBegin;
810   if (v) *v = red->ignore_dm;
811   PetscFunctionReturn(0);
812 }
813 
814 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
815 {
816   PC_Telescope red = (PC_Telescope)pc->data;
817   PetscFunctionBegin;
818   red->ignore_dm = v;
819   PetscFunctionReturn(0);
820 }
821 
822 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
823 {
824   PC_Telescope red = (PC_Telescope)pc->data;
825   PetscFunctionBegin;
826   if (v) *v = red->use_coarse_dm;
827   PetscFunctionReturn(0);
828 }
829 
830 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
831 {
832   PC_Telescope red = (PC_Telescope)pc->data;
833   PetscFunctionBegin;
834   red->use_coarse_dm = v;
835   PetscFunctionReturn(0);
836 }
837 
838 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
839 {
840   PC_Telescope red = (PC_Telescope)pc->data;
841   PetscFunctionBegin;
842   if (v) *v = red->ignore_kspcomputeoperators;
843   PetscFunctionReturn(0);
844 }
845 
846 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
847 {
848   PC_Telescope red = (PC_Telescope)pc->data;
849   PetscFunctionBegin;
850   red->ignore_kspcomputeoperators = v;
851   PetscFunctionReturn(0);
852 }
853 
854 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
855 {
856   PC_Telescope red = (PC_Telescope)pc->data;
857   PetscFunctionBegin;
858   *dm = private_PCTelescopeGetSubDM(red);
859   PetscFunctionReturn(0);
860 }
861 
/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetDM(), PCTelescopeGetReductionFactor()
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));
  PetscFunctionReturn(0);
}
882 
/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetReductionFactor()
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));
  PetscFunctionReturn(0);
}
903 
/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetReductionFactor()
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));
  PetscFunctionReturn(0);
}
924 
/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetIgnoreDM()
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));
  PetscFunctionReturn(0);
}
945 
/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetIgnoreDM()
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));
  PetscFunctionReturn(0);
}
966 
/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetUseCoarseDM()
@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));
  PetscFunctionReturn(0);
}
987 
988 /*@
989  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
990 
991  Not Collective
992 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
998 
999  Notes:
1000  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1001  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1002  -pc_telescope_subcomm_type will no longer have any meaning.
1003  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
1005  is the same as that of the parent DM.
1006  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1007  This will be checked at the time the preconditioner is setup and an error will occur if
1008  the coarse DM does not define a sub-communicator of that used by the parent DM.
1009 
1010  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1011  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1012 
1013  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1014 
1015  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1016  In the user code, this is achieved via
1017 .vb
1018    {
1019      DM dm_fine;
1020      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1021    }
1022 .ve
1023  The signature of the user provided field scatter method is
1024 .vb
1025    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1026 .ve
1027  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1028  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1029 
1030  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1031  of state variables between the fine and coarse DMs.
1032  In the context of a finite element discretization, an example state variable might be
1033  values associated with quadrature points within each element.
1034  A user provided state scatter method is composed via
1035 .vb
1036    {
1037      DM dm_fine;
1038      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1039    }
1040 .ve
1041  The signature of the user provided state scatter method is
1042 .vb
1043    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1044 .ve
1045  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1046  The user is only required to support mode = SCATTER_FORWARD.
1047  No assumption is made about the data type of the state variables.
1048  These must be managed by the user and must be accessible from the DM.
1049 
1050  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1051  associated with the sub-KSP residing within PCTelescope.
1052  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1053  KSPSetComputeOperators() should be "similar" in type or origin.
1054  Specifically the following rules are used to infer what context on the sub-KSP should be.
1055 
1056  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1057  Note that the special case of a DMSHELL context is queried.
1058 
1059 .vb
1060    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1061    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1062    DMShellGetContext(dm_fine,&dmfine_shellctx);
1063 
1064    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1065    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1066 .ve
1067 
1068  The following rules are then enforced:
1069 
1070  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1071  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1072 
1073  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1074  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1075  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1076 
1077  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1078  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1079  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1080 
1081  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1082  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1083  In this case, an additional mechanism is provided via a composed function which will return
1084  the actual context to be used. To use this feature you must compose the "getter" function
1085  with the coarse DM, e.g.
1086 .vb
1087    {
1088      DM dm_coarse;
1089      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1090    }
1091 .ve
1092  The signature of the user provided method is
1093 .vb
1094    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1095 .ve
1096 
1097  Level: advanced
1098 
1099 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscFunctionBegin;
  /* Try-method: silently a no-op when pc is not of type PCTELESCOPE */
  PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));
  PetscFunctionReturn(0);
}
1106 
/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if any method set via KSPSetComputeOperators() will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetIgnoreKSPComputeOperators()
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));
  PetscFunctionReturn(0);
}
1127 
/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetIgnoreKSPComputeOperators()
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));
  PetscFunctionReturn(0);
}
1148 
/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetKSP(), PCTelescopeSetIgnoreDM()
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));
  PetscFunctionReturn(0);
}
1169 
/*@
 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

 Notes:
 This must be called before the preconditioner is set up; an error is raised
 if the subcommunicator type is changed after PCSetUp() has been called.

 Level: advanced

.seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));
  PetscFunctionReturn(0);
}
1189 
/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Notes:
 The type may be set via PCTelescopeSetSubcommType() or the option -pc_telescope_subcomm_type.

 Level: advanced

.seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));
  PetscFunctionReturn(0);
}
1211 
1212 /* -------------------------------------------------------------------------------------*/
1213 /*MC
1214    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1215 
1216    Options Database:
1217 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1218 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1219 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1220 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1221 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1222 
1223    Level: advanced
1224 
1225    Notes:
1226    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1227    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1228    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1229    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1230    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1231    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1232 
1233    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1234 
1235    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1236    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1237    communicators c and c' respectively.
1238 
1239    [1] Default setup
1240    The sub-communicator c' is created via PetscSubcommCreate().
1241    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1243    No support is provided for KSPSetComputeOperators().
1244    Currently there is no support for the flag -pc_use_amat.
1245 
1246    [2] DM aware setup
1247    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1248    c' is created via PetscSubcommCreate().
1249    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1250    Currently only support for re-partitioning a DMDA is provided.
1251    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1253    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1254    This is fragile since the user must ensure that their user context is valid for use on c'.
1255    Currently there is no support for the flag -pc_use_amat.
1256 
1257    [3] Coarse DM setup
1258    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1259    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1260    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1261    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1262    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1263    available with using multi-grid on unstructured meshes.
1264    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1265    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1267    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1268    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1269    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1270    Currently there is no support for the flag -pc_use_amat.
1271    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1272    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1273 
1274    Developer Notes:
1275    During PCSetup, the B operator is scattered onto c'.
1276    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1277    Then, KSPSolve() is executed on the c' communicator.
1278 
1279    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1280    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1281 
1282    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1283    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1284    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1285 
1286    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1287    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1288    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1290    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1291 
1292    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1293 
1294    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1295    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1296    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1297 
1298    Limitations/improvements include the following.
1299    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1300    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1301 
1302    The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1303    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
1304    VecPermute() does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1305    consistent manner.
1306 
1307    Mapping of vectors (default setup mode) is performed in the following way.
1308    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1309    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1310    We perform the scatter to the sub-communicator in the following way.
1311    [1] Given a vector x defined on communicator c
1312 
1313 .vb
1314    rank(c)  local values of x
1315    ------- ----------------------------------------
1316         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1317         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1318         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1319         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1320 .ve
1321 
1322    scatter into xtmp defined also on comm c, so that we have the following values
1323 
1324 .vb
1325    rank(c)  local values of xtmp
1326    ------- ----------------------------------------------------------------------------
1327         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1328         1   [ ]
1329         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1330         3   [ ]
1331 .ve
1332 
1333    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1334 
1335    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1336    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1337 
1338 .vb
1339    rank(c')  local values of xred
1340    -------- ----------------------------------------------------------------------------
1341          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1342          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1343 .ve
1344 
1345   Contributed by Dave May
1346 
1347   Reference:
1348   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1349 
1350 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1351 M*/
1352 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1353 {
1354   struct _PC_Telescope *sred;
1355 
1356   PetscFunctionBegin;
1357   PetscCall(PetscNewLog(pc,&sred));
1358   sred->psubcomm       = NULL;
1359   sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
1360   sred->subcomm        = MPI_COMM_NULL;
1361   sred->redfactor      = 1;
1362   sred->ignore_dm      = PETSC_FALSE;
1363   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1364   sred->use_coarse_dm  = PETSC_FALSE;
1365   pc->data             = (void*)sred;
1366 
1367   pc->ops->apply           = PCApply_Telescope;
1368   pc->ops->applytranspose  = NULL;
1369   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1370   pc->ops->setup           = PCSetUp_Telescope;
1371   pc->ops->destroy         = PCDestroy_Telescope;
1372   pc->ops->reset           = PCReset_Telescope;
1373   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1374   pc->ops->view            = PCView_Telescope;
1375 
1376   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1377   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1378   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1379   sred->pctelescope_reset_type              = NULL;
1380 
1381   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope));
1382   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope));
1383   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope));
1384   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope));
1385   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope));
1386   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope));
1387   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope));
1388   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
1389   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
1390   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope));
1391   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope));
1392   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope));
1393   PetscFunctionReturn(0);
1394 }
1395