#include <petsc/private/petscimpl.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/
#include <petscdm.h> /*I "petscdm.h" I*/
#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";

/*
 default setup mode

 [1a] scatter to (FORWARD)
 x(comm) -> xtmp(comm)
 [1b] local copy (to) ranks with color = 0
 xred(subcomm) <- xtmp

 [2] solve on sub KSP to obtain yred(subcomm)

 [3a] local copy (from) ranks with color = 0
 yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
 xtmp(comm) -> y(comm)
*/
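
/*
 A minimal user-level sketch of enabling this preconditioner in the default setup mode
 (illustrative only; assumes an already configured KSP named ksp):

   PC pc;
   PetscCall(KSPGetPC(ksp,&pc));
   PetscCall(PCSetType(pc,PCTELESCOPE));
   PetscCall(PCTelescopeSetReductionFactor(pc,2));

 Steps [1a]-[3b] above are then carried out inside PCApply_Telescope().
*/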

/*
  Collective on comm_f
  Notes:
   * Using comm_f = MPI_COMM_NULL will result in an error.
   * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL, the subcomm is not valid.
   * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1;
  MPI_Group      group_f,group_c;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  PetscCheck(comm_f != MPI_COMM_NULL,PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  PetscCallMPI(MPI_Comm_group(comm_f,&group_f));
  if (comm_c != MPI_COMM_NULL) {
    PetscCallMPI(MPI_Comm_group(comm_c,&group_c));
  }

  PetscCallMPI(MPI_Comm_size(comm_f,&size_f));
  if (comm_c != MPI_COMM_NULL) {
    PetscCallMPI(MPI_Comm_size(comm_c,&size_c));
  }

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f,&ranks_f));
  PetscCall(PetscMalloc1(size_c,&ranks_c));
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs; rather, I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCallMPI() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  if (count == size_f) valid = 0;

  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) {
    PetscCallMPI(MPI_Group_free(&group_c));
  }
  PetscFunctionReturn(0);
}
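
/*
 An illustrative sketch (not part of the build) of validating a user supplied sub-communicator
 before handing it to PCTelescope; comm_parent and comm_sub are hypothetical names:

   PetscBool isvalid;
   PetscCall(PCTelescopeTestValidSubcomm(comm_parent,comm_sub,&isvalid));
   PetscCheck(isvalid,comm_parent,PETSC_ERR_ARG_WRONG,"comm_sub is not a valid sub-communicator of comm_parent");
*/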

DM private_PCTelescopeGetSubDM(PC_Telescope sred)
{
  DM subdm = NULL;

  if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
  else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT: subdm = NULL;
      break;
    case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
      break;
    case TELESCOPE_DMPLEX:  subdm = NULL;
      break;
    case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
      break;
    }
  }
  return(subdm);
}

PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;
  VecType        vectype;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc,"PCTelescope: setup (default)\n"));
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PetscCall(PCGetOperators(pc,NULL,&B));
  PetscCall(MatGetSize(B,&M,NULL));
  PetscCall(MatGetBlockSize(B,&bs));
  PetscCall(MatCreateVecs(B,&x,NULL));
  PetscCall(MatGetVecType(B,&vectype));

  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecCreate(subcomm,&xred));
    PetscCall(VecSetSizes(xred,PETSC_DECIDE,M));
    PetscCall(VecSetBlockSize(xred,bs));
    PetscCall(VecSetType(xred,vectype)); /* Use the preconditioner matrix's vectype by default */
    PetscCall(VecSetFromOptions(xred));
    PetscCall(VecGetLocalSize(xred,&m));
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecDuplicate(xred,&yred));
  }

  PetscCall(VecCreate(comm,&xtmp));
  PetscCall(VecSetSizes(xtmp,m,PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp,bs));
  PetscCall(VecSetType(xtmp,vectype));

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecGetOwnershipRange(xred,&st,&ed));
    PetscCall(ISCreateStride(comm,(ed-st),st,1,&isin));
  } else {
    PetscCall(VecGetOwnershipRange(x,&st,&ed));
    PetscCall(ISCreateStride(comm,0,st,1,&isin));
  }
  PetscCall(ISSetBlockSize(isin,bs));

  PetscCall(VecScatterCreate(x,isin,xtmp,NULL,&scatter));

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(0);
}

PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc,bs;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(PCGetOperators(pc,NULL,&B));
  PetscCall(MatGetSize(B,&nr,&nc));
  isrow = sred->isin;
  PetscCall(ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol));
  PetscCall(ISSetIdentity(iscol));
  PetscCall(MatGetBlockSizes(B,NULL,&bs));
  PetscCall(ISSetBlockSize(iscol,bs));
  PetscCall(MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
  PetscCall(MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal));
  Blocal = *_Blocal;
  PetscCall(PetscFree(_Blocal));
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    PetscCall(MatGetSize(Blocal,&mm,NULL));
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred));
  }
  *A = Bred;
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&Blocal));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      PetscCall(VecDuplicateVecs(sred->xred,n,&sub_vecs));
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      PetscCall(VecGetArrayRead(sred->xtmp,&x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k],&st,&ed));
        PetscCall(VecGetArray(sub_vecs[k],&LA_sub_vec));
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        PetscCall(VecRestoreArray(sub_vecs[k],&LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp,&x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace));
    PetscCall(VecDestroyVecs(n,&sub_vecs));
    PetscCheck(!nullspace->remove,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
{
  Mat            B;

  PetscFunctionBegin;
  PetscCall(PCGetOperators(pc,NULL,&B));
  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace,sub_nullspace;
    PetscCall(MatGetNullSpace(B,&nullspace));
    if (nullspace) {
      PetscCall(PetscInfo(pc,"PCTelescope: generating nullspace (default)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNullSpace(sub_mat,sub_nullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nullspace));
      }
    }
  }
  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    PetscCall(MatGetNearNullSpace(B,&nearnullspace));
    if (nearnullspace) {
      PetscCall(PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNearNullSpace(sub_mat,sub_nearnullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
      }
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  if (iascii) {
    {
      MPI_Comm    comm,subcomm;
      PetscMPIInt comm_size,subcomm_size;
      DM          dm = NULL,subdm = NULL;

      PetscCall(PCGetDM(pc,&dm));
      subdm = private_PCTelescopeGetSubDM(sred);

      if (sred->psubcomm) {
        comm = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        PetscCallMPI(MPI_Comm_size(comm,&comm_size));
        PetscCallMPI(MPI_Comm_size(subcomm,&subcomm_size));

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n",sred->redfactor));
        PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size));
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED :
          PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = %s\n",PetscSubcommTypes[sred->subcommtype]));
          break;
        case PETSC_SUBCOMM_CONTIGUOUS :
          PetscCall(PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = %s\n",PetscSubcommTypes[sred->subcommtype]));
          break;
        default :
          SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
        }
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) {
          subcomm = PETSC_COMM_SELF;
        }

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n"));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }

      PetscCall(PetscViewerGetSubViewer(viewer,subcomm,&subviewer));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(PetscViewerASCIIPushTab(subviewer));

        if (dm && sred->ignore_dm) {
          PetscCall(PetscViewerASCIIPrintf(subviewer,"ignoring DM\n"));
        }
        if (sred->ignore_kspcomputeoperators) {
          PetscCall(PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n"));
        }
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: default\n"));
          break;
        case TELESCOPE_DMDA:
          PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n"));
          PetscCall(DMView_DA_Short(subdm,subviewer));
          break;
        case TELESCOPE_DMPLEX:
          PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n"));
          break;
        case TELESCOPE_COARSEDM:
          PetscCall(PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n"));
          break;
        }

        if (dm) {
          PetscObject obj = (PetscObject)dm;
          PetscCall(PetscViewerASCIIPrintf(subviewer,"Parent DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE));
          if (obj->type_name) { PetscCall(PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name)); }
          if (obj->name) { PetscCall(PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name)); }
          if (obj->prefix) { PetscCall(PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix)); }
          PetscCall(PetscViewerASCIIPrintf(subviewer,"\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n"));
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          PetscCall(PetscViewerASCIIPrintf(subviewer,"Sub DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE));
          if (obj->type_name) { PetscCall(PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name)); }
          if (obj->name) { PetscCall(PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name)); }
          if (obj->prefix) { PetscCall(PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix)); }
          PetscCall(PetscViewerASCIIPrintf(subviewer,"\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n"));
        }

        PetscCall(KSPView(sred->ksp,subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer));
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm,subcomm=MPI_COMM_NULL;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    PetscCall(PCGetDM(pc,&dm));
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm,DMDA,&same));
      if (same) {
        PetscCall(PetscInfo(pc,"PCTelescope: found DMDA\n"));
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same));
      if (same) {
        PetscCall(PetscInfo(pc,"PCTelescope: found DMPLEX\n"));
        sr_type = TELESCOPE_DMPLEX;
      }

      if (sred->use_coarse_dm) {
        PetscCall(PetscInfo(pc,"PCTelescope: using coarse DM\n"));
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        PetscCall(PetscInfo(pc,"PCTelescope: ignoring DM\n"));
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        PetscCall(PetscSubcommCreate(comm,&sred->psubcomm));
        PetscCall(PetscSubcommSetNumber(sred->psubcomm,sred->redfactor));
        PetscCall(PetscSubcommSetType(sred->psubcomm,sred->subcommtype));
        PetscCall(PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm)));
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      PetscCall(PCGetDM(pc,&dm));
      comm_fine = PetscObjectComm((PetscObject)dm);
      PetscCall(DMGetCoarseDM(dm,&dm_coarse_partition));
      if (dm_coarse_partition) { cnt = 1; }
      PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine));
      PetscCheck(cnt != 0,comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      PetscCallMPI(MPI_Comm_size(comm_fine,&csize_fine));
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        PetscCallMPI(MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition));
      }

      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      PetscCallMPI(MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine));
      PetscCheck(csg[0] != csg[1],comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PetscCall(PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm));
      PetscCheck(isvalidsubcomm,comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      PetscCall(KSPCreate(subcomm,&sred->ksp));
      PetscCall(KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1));
      PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp));
      PetscCall(KSPSetType(sred->ksp,KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc,&prefix));
      PetscCall(KSPSetOptionsPrefix(sred->ksp,prefix));
      PetscCall(KSPAppendOptionsPrefix(sred->ksp,"telescope_"));
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    PetscCall(sred->pctelescope_setup_type(pc,sred));
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      PetscCall(sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred));
    }
    if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred));
  } else {
    if (sred->pctelescope_matcreate_type) {
      PetscCall(sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred));
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetOperators(sred->ksp,sred->Bred,sred->Bred));
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      PetscCall(KSPSetFromOptions(sred->ksp));
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation,&cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD));

  /* copy vector entries into xred */
  PetscCall(VecGetArrayRead(xtmp,&x_array));
  if (xred) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xred,&st,&ed));
    PetscCall(VecGetArray(xred,&LA_xred));
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    PetscCall(VecRestoreArray(xred,&LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp,&x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp,xred,yred));
    PetscCall(KSPCheckSolve(sred->ksp,pc,yred));
  }
  /* return vector */
  PetscCall(VecGetArray(xtmp,&array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred,&st,&ed));
    PetscCall(VecGetArrayRead(yred,&LA_yred));
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    PetscCall(VecRestoreArrayRead(yred,&LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp,&array));
  PetscCall(VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheck(its <= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD));

    /* copy vector entries into yred */
    PetscCall(VecGetArrayRead(xtmp,&x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred,&st,&ed));
      PetscCall(VecGetArray(yred,&LA_yred));
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      PetscCall(VecRestoreArray(yred,&LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp,&x_array));
  }

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc,x,y));

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value));
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(ISDestroy(&sred->isin));
  PetscCall(VecScatterDestroy(&sred->scatter));
  PetscCall(VecDestroy(&sred->xred));
  PetscCall(VecDestroy(&sred->yred));
  PetscCall(VecDestroy(&sred->xtmp));
  PetscCall(MatDestroy(&sred->Bred));
  PetscCall(KSPReset(sred->ksp));
  if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Telescope(pc));
  PetscCall(KSPDestroy(&sred->ksp));
  PetscCall(PetscSubcommDestroy(&sred->psubcomm));
  PetscCall(PetscFree(sred->dm_ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscOptionsHeadBegin(PetscOptionsObject,"Telescope options");
  PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg));
  if (flg) PetscCall(PCTelescopeSetSubcommType(pc,subcommtype));
  PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL));
  PetscCheck(sred->redfactor <= size,comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/* PC implementation-specific APIs */

static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (ksp) *ksp = red->ksp;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (subcommtype) *subcommtype = red->subcommtype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
{
  PC_Telescope     red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCheck(!pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
  red->subcommtype = subcommtype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (fact) *fact = red->redfactor;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
{
  PC_Telescope     red = (PC_Telescope)pc->data;
  PetscMPIInt      size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
  PetscCheck(fact > 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %" PetscInt_FMT " must be positive",fact);
  PetscCheck(fact <= size,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size",fact);
  red->redfactor = fact;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->ignore_dm;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->ignore_dm = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->use_coarse_dm;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->use_coarse_dm = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->ignore_kspcomputeoperators;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->ignore_kspcomputeoperators = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  *dm = private_PCTelescopeGetSubDM(red);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

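 Example:
 A minimal sketch of tuning the sub KSP (illustrative usage only; subksp is NULL on ranks
 which are not members of the sub-communicator):
.vb
   KSP subksp;
   PetscCall(PCTelescopeGetKSP(pc,&subksp));
   if (subksp) PetscCall(KSPSetType(subksp,KSPGMRES));
.ve
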
 Level: advanced

@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the size of the original communicator will be reduced.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

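 Example:
 An illustrative sketch: with 16 MPI ranks and a reduction factor of 4, the sub KSP runs on 4 ranks.
.vb
   PetscCall(PCSetType(pc,PCTELESCOPE));
   PetscCall(PCTelescopeSetReductionFactor(pc,4));
.ve
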
 Level: advanced

@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be ignored.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM

 Level: advanced

@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to the DM associated with the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM

 Notes:
 When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
 will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
 -pc_telescope_subcomm_type will no longer have any meaning.
 It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
 is the same as that of the parent DM.
 Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
 This will be checked at the time the preconditioner is set up and an error will occur if
 the coarse DM does not define a sub-communicator of that used by the parent DM.

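 A sketch of attaching a coarse DM before this flag takes effect (illustrative only; dmf and dmc
 are hypothetical fine and coarse DMs, with dmc living on a valid sub-communicator of dmf's communicator):
.vb
   PetscCall(DMSetCoarseDM(dmf,dmc));
   PetscCall(PCSetDM(pc,dmf));
   PetscCall(PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE));
.ve
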
 The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
 the DM used (e.g. it supports DMSHELL, DMPLEX, etc).

 Support is currently only provided for the case when you are using KSPSetComputeOperators().

 The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
 In the user code, this is achieved via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   }
.ve
 The signature of the user provided field scatter method is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
.ve
 The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
 SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.

 Optionally, the user may also compose a function with the parent DM to facilitate the transfer
 of state variables between the fine and coarse DMs.
 In the context of a finite element discretization, an example state variable might be
 values associated with quadrature points within each element.
 A user provided state scatter method is composed via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
   }
.ve
 The signature of the user provided state scatter method is
.vb
   PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
.ve
 SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
 The user is only required to support mode = SCATTER_FORWARD.
 No assumption is made about the data type of the state variables.
 These must be managed by the user and must be accessible from the DM.

 Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
 associated with the sub-KSP residing within PCTelescope.
 In general, PCTelescope assumes that the context on the fine and coarse DM used with
 KSPSetComputeOperators() should be "similar" in type or origin.
 Specifically the following rules are used to infer what context on the sub-KSP should be.

 First the contexts from the KSP and the fine and coarse DMs are retrieved.
 Note that the special case of a DMSHELL context is queried.

.vb
   DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
   DMGetApplicationContext(dm_fine,&dmfine_appctx);
   DMShellGetContext(dm_fine,&dmfine_shellctx);

   DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
   DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
.ve

 The following rules are then enforced:

 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);

 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
 check that dmcoarse_appctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
 check that dmcoarse_shellctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

 If none of the above three tests pass, then PCTelescope cannot safely determine what
 context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
 In this case, an additional mechanism is provided via a composed function which will return
 the actual context to be used. To use this feature you must compose the "getter" function
 with the coarse DM, e.g.
.vb
   {
     DM dm_coarse;
     PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
   }
.ve
 The signature of the user provided method is
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
.ve

 Level: advanced

@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be ignored.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

 Level: advanced

@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

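 Example:
 An illustrative sketch; subdm is NULL on ranks outside the sub-communicator and for setup
 modes which do not re-partition a DM:
.vb
   DM subdm;
   PetscCall(PCTelescopeGetDM(pc,&subdm));
.ve
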
 Level: advanced

@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetSubcommType - Set the subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

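 Example:
 An illustrative sketch selecting contiguous rank grouping (must be called before the PC is set up):
.vb
   PetscCall(PCTelescopeSetSubcommType(pc,PETSC_SUBCOMM_CONTIGUOUS));
.ve
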
 Level: advanced

.seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscFunctionBegin;
  PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscFunctionBegin;
  PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.

   Options Database:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
.  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. See PetscSubcomm for more information.
.  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
-  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.

   Level: advanced

   Notes:
   Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
   The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
   sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
   This means there will be MPI ranks which will be idle during the application of this preconditioner.
   Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.

   The default type of the sub KSP (the KSP defined on c') is PREONLY.

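   For example, an illustrative (hypothetical) invocation which reduces a 16 rank communicator to
   4 ranks and solves the inner problem with LU, using the "telescope_" options prefix of the sub KSP:
.vb
   mpiexec -n 16 ./your_app -ksp_type fgmres -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_ksp_type preonly -telescope_pc_type lu
.ve
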
   There are three setup mechanisms for PCTelescope. Features supported by each type are described below.
   In the following, we will refer to the operators B and B'; these are the Bmat operators provided to the KSP on the
   communicators c and c' respectively.

   [1] Default setup
   The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   No support is provided for KSPSetComputeOperators().
   Currently there is no support for the flag -pc_use_amat.

   [2] DM aware setup
   If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
   c' is created via PetscSubcommCreate().
   Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
   Currently only support for re-partitioning a DMDA is provided.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   Support is provided for KSPSetComputeOperators(). The user provided function and context are propagated to the sub KSP.
   This is fragile since the user must ensure that their user context is valid for use on c'.
   Currently there is no support for the flag -pc_use_amat.

   [3] Coarse DM setup
   If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
   PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
   The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
   PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
   The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
   available when using multi-grid on unstructured meshes.
   This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
   The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
   Currently there is no support for the flag -pc_use_amat.
   This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
   Further information about the user-provided methods required by this setup type is described in PCTelescopeSetUseCoarseDM().

   Developer Notes:
   During PCSetup, the B operator is scattered onto c'.
   Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then, KSPSolve() is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
   creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.

   The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
   In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
   a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c').

   The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
   support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
   and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
   Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.

   With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.

   When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat().

   Limitations/improvements include the following.
   VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
   A unified mechanism is also needed to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().

   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. It might be more efficient to use MatPermute(); we opted to use P^T.A.P as it appears
   VecPermute() does not support the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
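
   Note that such a triple product can also be formed with MatPtAP(); an illustrative sketch (an aside of this note, not necessarily the code path used here):
.vb
   Mat Aperm;
   PetscCall(MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Aperm));
.ve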

   Mapping of vectors (default setup mode) is performed in the following way.
   Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
   We perform the scatter to the sub-communicator in the following way.
   [1] Given a vector x defined on communicator c

.vb
   rank(c)  local values of x
   ------- ----------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
        1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
        3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

   scatter into xtmp defined also on comm c, so that we have the following values

.vb
   rank(c)  local values of xtmp
   ------- ----------------------------------------------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        1   [ ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
        3   [ ]
.ve

   The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.

   [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.

.vb
   rank(c')  local values of xred
   -------- ----------------------------------------------------------------------------
         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
         1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

  Contributed by Dave May

  Reference:
  Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913

.seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc,&sred));
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope));
  PetscFunctionReturn(0);
}