#include <petsc/private/petscimpl.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/
#include <petscdm.h>  /*I "petscdm.h" I*/
#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
                               " title = {Extreme-Scale Multigrid Components within PETSc},\n"
                               " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
                               " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
                               " series = {PASC '16},\n"
                               " isbn = {978-1-4503-4126-4},\n"
                               " location = {Lausanne, Switzerland},\n"
                               " pages = {5:1--5:12},\n"
                               " articleno = {5},\n"
                               " numpages = {12},\n"
                               " url = {https://doi.acm.org/10.1145/2929908.2929913},\n"
                               " doi = {10.1145/2929908.2929913},\n"
                               " acmid = {2929913},\n"
                               " publisher = {ACM},\n"
                               " address = {New York, NY, USA},\n"
                               " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
                               " year = {2016}\n"
                               "}\n";

/*
  default setup mode

  [1a] scatter to (FORWARD)
       x(comm) -> xtmp(comm)
  [1b] local copy (to) ranks with color = 0
       xred(subcomm) <- xtmp

  [2]  solve on sub KSP to obtain yred(subcomm)

  [3a] local copy (from) ranks with color = 0
       yred(subcomm) --> xtmp
  [3b] scatter from (REVERSE)
       xtmp(comm) -> y(comm)
*/

/*
  Collective on comm_f

  Notes:
  * Using comm_f = MPI_COMM_NULL will result in an error.
  * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL, the subcomm is not valid.
  * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid)
{
  PetscInt     valid = 1;
  MPI_Group    group_f, group_c;
  PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
  PetscMPIInt *ranks_f, *ranks_c;

  PetscFunctionBegin;
  PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");

  PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));

  PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f, &ranks_f));
  PetscCall(PetscMalloc1(size_c, &ranks_c));
  for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k = 0; k < size_c; k++) ranks_c[k] = k;

  /*
    MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
    I do not want the code to terminate immediately if this occurs; rather, I want to throw
    the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
    that comm_c is not a valid sub-communicator.
    Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
    for (k = 0; k < size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) count++;
    }
  }
  if (count == size_f) valid = 0;

  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
  PetscFunctionReturn(PETSC_SUCCESS);
}

DM private_PCTelescopeGetSubDM(PC_Telescope sred)
{
  DM subdm = NULL;

  if (!PCTelescope_isActiveRank(sred)) {
    subdm = NULL;
  } else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT:
      subdm = NULL;
      break;
    case TELESCOPE_DMDA:
      subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart;
      break;
    case TELESCOPE_DMPLEX:
      subdm = NULL;
      break;
    case TELESCOPE_COARSEDM:
      if (sred->ksp) PetscCallAbort(PETSC_COMM_SELF, KSPGetDM(sred->ksp, &subdm));
      break;
    }
  }
  return subdm;
}

PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred)
{
  PetscInt   m, M, bs, st, ed;
  Vec        x, xred, yred, xtmp;
  Mat        B;
  MPI_Comm   comm, subcomm;
  VecScatter scatter;
  IS         isin;
  VecType    vectype;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &M, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetVecType(B, &vectype));

  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecCreate(subcomm, &xred));
    PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
    PetscCall(VecSetBlockSize(xred, bs));
    PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
    PetscCall(VecSetFromOptions(xred));
    PetscCall(VecGetLocalSize(xred, &m));
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));

  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, vectype));

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));

  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
{
  MPI_Comm comm, subcomm;
  Mat      Bred, B;
  PetscInt nr, nc, bs;
  IS       isrow, iscol;
  Mat      Blocal, *_Blocal;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &nr, &nc));
  isrow = sred->isin;
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
  PetscCall(ISSetIdentity(iscol));
  PetscCall(MatGetBlockSizes(B, NULL, &bs));
  PetscCall(ISSetBlockSize(iscol, bs));
  PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
  PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
  Blocal = *_Blocal;
  PetscCall(PetscFree(_Blocal));
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) Bred = *A;

    PetscCall(MatGetSize(Blocal, &mm, NULL));
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
  }
  *A = Bred;
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&Blocal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
{
  PetscBool  has_const;
  const Vec *vecs;
  Vec       *sub_vecs = NULL;
  PetscInt   i, k, n = 0;
  MPI_Comm   subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries */
  for (k = 0; k < n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt           st, ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
        PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
        for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
        PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
    PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat)
{
  Mat B;

  PetscFunctionBegin;
  PetscCall(PCGetOperators(pc, NULL, &B));
  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace, sub_nullspace;
    PetscCall(MatGetNullSpace(B, &nullspace));
    if (nullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nullspace));
      }
    }
  }
  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace, sub_nearnullspace;
    PetscCall(MatGetNearNullSpace(B, &nearnullspace));
    if (nearnullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer)
{
  PC_Telescope sred = (PC_Telescope)pc->data;
  PetscBool    iascii, isstring;
  PetscViewer  subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    {
      MPI_Comm    comm, subcomm;
      PetscMPIInt comm_size, subcomm_size;
      DM          dm = NULL, subdm = NULL;

      PetscCall(PCGetDM(pc, &dm));
      subdm = private_PCTelescopeGetSubDM(sred);

      if (sred->psubcomm) {
        comm    = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        PetscCallMPI(MPI_Comm_size(comm, &comm_size));
        PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED:
          PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
          break;
        case PETSC_SUBCOMM_CONTIGUOUS:
          PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
          break;
        default:
          SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
        }
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }

      PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(PetscViewerASCIIPushTab(subviewer));

        if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
        if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n"));
          break;
        case TELESCOPE_DMDA:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
          PetscCall(DMView_DA_Short(subdm, subviewer));
          break;
        case TELESCOPE_DMPLEX:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n"));
          break;
        case TELESCOPE_COARSEDM:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n"));
          break;
        }

        if (dm) {
          PetscObject obj = (PetscObject)dm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
        }

        PetscCall(KSPView(sred->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm, subcomm = MPI_COMM_NULL;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm, same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PetscCall(PCGetDM(pc, &dm));
    if (dm) has_dm = PETSC_TRUE;
    if (has_dm) {
      /* check for dmda */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
        sr_type = TELESCOPE_DMPLEX;
      }

      if (sred->use_coarse_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
        PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
        PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm, dm_coarse_partition = NULL;
      MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
      PetscBool   isvalidsubcomm;

      PetscCall(PCGetDM(pc, &dm));
      comm_fine = PetscObjectComm((PetscObject)dm);
      PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
      if (dm_coarse_partition) cnt = 1;
      PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
      PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");

      PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
      }

      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
      PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
      PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      PetscCall(KSPCreate(subcomm, &sred->ksp));
      PetscCall(KSPSetNestLevel(sred->ksp, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
      PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
    if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
  } else {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
    if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, xred, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));

  /* copy vector entries into xred */
  PetscCall(VecGetArrayRead(xtmp, &x_array));
  if (xred) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(VecGetArray(xred, &LA_xred));
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    PetscCall(VecRestoreArray(xred, &LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp, xred, yred));
    PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
  }
  /* return vector */
  PetscCall(VecGetArray(xtmp, &array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred, &st, &ed));
    PetscCall(VecGetArrayRead(yred, &LA_yred));
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    PetscCall(VecRestoreArrayRead(yred, &LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp, &array));
  PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  const PetscScalar *x_array;
  PetscBool          default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));

    /* copy vector entries into yred */
    PetscCall(VecGetArrayRead(xtmp, &x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred, &st, &ed));
      PetscCall(VecGetArray(yred, &LA_yred));
      for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
      PetscCall(VecRestoreArray(yred, &LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  }

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc, x, y));

  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(ISDestroy(&sred->isin));
  PetscCall(VecScatterDestroy(&sred->scatter));
  PetscCall(VecDestroy(&sred->xred));
  PetscCall(VecDestroy(&sred->yred));
  PetscCall(VecDestroy(&sred->xtmp));
  PetscCall(MatDestroy(&sred->Bred));
  PetscCall(KSPReset(sred->ksp));
  if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Telescope(pc));
  PetscCall(KSPDestroy(&sred->ksp));
  PetscCall(PetscSubcommDestroy(&sred->psubcomm));
  PetscCall(PetscFree(sred->dm_ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
  PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
  if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
  PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
  PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor must be <= comm size");
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* PC implementation specific APIs */

static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (ksp) *ksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (subcommtype) *subcommtype = red->subcommtype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
  red->subcommtype = subcommtype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (fact) *fact = red->redfactor;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscMPIInt  size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
  PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
  red->redfactor = fact;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (v) *v = red->ignore_dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  red->ignore_dm = v;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (v) *v = red->use_coarse_dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  red->use_coarse_dm = v;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (v) *v = red->ignore_kspcomputeoperators;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  red->ignore_kspcomputeoperators = v;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  *dm = private_PCTelescopeGetSubDM(red);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. subksp - the `KSP` defined on the smaller set of processes
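
  Note:
  On MPI ranks which are not members of the sub-communicator, subksp is set to NULL.
  The sub-`KSP` inherits the options prefix of pc with "telescope_" appended, so it can also
  be configured from the command line. As a sketch of programmatic customization (the solver
  choice here is purely illustrative),
.vb
  KSP subksp;

  PetscCall(PCTelescopeGetKSP(pc, &subksp));
  if (subksp) { /* only ranks active on the sub-communicator hold the sub-KSP */
    PetscCall(KSPSetType(subksp, KSPGMRES));
  }
.ve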

  Level: advanced

.seealso: `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI ranks was reduced.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. fact - the reduction factor

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI ranks will be reduced.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- fact - the reduction factor
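
  Note:
  For example, with 64 MPI ranks and a reduction factor of 4, the sub-communicator will
  contain 64/4 = 16 ranks. A sketch of typical usage (assuming pc has already been set to
  type `PCTELESCOPE`) is
.vb
  PetscCall(PCTelescopeSetReductionFactor(pc, 4));
.ve
  The same effect is obtained with the command line option -pc_telescope_reduction_factor 4.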

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be used.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. v - the flag

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC`.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore any `DM`

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to the `DM` associated with the `PC` will be used.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. v - the flag

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM`

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_FALSE` to ignore any coarse `DM`

  Notes:
  When you have specified to use a coarse `DM`, the communicator used to create the sub-KSP within `PCTELESCOPE`
  will be that of the coarse `DM`. Hence the flags -pc_telescope_reduction_factor and
  -pc_telescope_subcomm_type will no longer have any meaning.

  It is required that the communicators associated with the parent (fine) `DM` and the coarse `DM` are of different sizes.
  An error will occur if the size of the communicator associated with the coarse `DM`
  is the same as that of the parent `DM`.
  Furthermore, it is required that the communicator on the coarse `DM` is a sub-communicator of the parent.
  This will be checked at the time the preconditioner is set up, and an error will occur if
  the coarse `DM` does not define a sub-communicator of that used by the parent `DM`.

  The particular Telescope setup invoked when using a coarse `DM` is agnostic with respect to the type of
  the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).

  Support is currently only provided for the case when you are using `KSPSetComputeOperators()`.

  The user is required to compose a function with the parent `DM` to facilitate the transfer of fields (`Vec`)
  between the different decompositions defined by the fine and coarse `DM`s.
  In the user code, this is achieved via
.vb
  {
    DM dm_fine;
    PetscObjectCompose((PetscObject)dm_fine, "PCTelescopeFieldScatter", your_field_scatter_method);
  }
.ve
  The signature of the user provided field scatter method is
.vb
  PetscErrorCode your_field_scatter_method(DM dm_fine, Vec x_fine, ScatterMode mode, DM dm_coarse, Vec x_coarse);
.ve
  The user must provide support for both mode = `SCATTER_FORWARD` and mode = `SCATTER_REVERSE`.
  `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
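
  As a sketch, a skeleton of such a method (all names are placeholders; the branch bodies must be
  filled in with transfer logic appropriate to the user's discretization) has the form
.vb
  PetscErrorCode your_field_scatter_method(DM dm_fine, Vec x_fine, ScatterMode mode, DM dm_coarse, Vec x_coarse)
  {
    PetscFunctionBegin;
    if (mode == SCATTER_FORWARD) {
      /* move field values defined on dm_fine (x_fine) into the coarse decomposition (x_coarse) */
    } else if (mode == SCATTER_REVERSE) {
      /* move field values defined on dm_coarse (x_coarse) back into the fine decomposition (x_fine) */
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }
.ve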

  Optionally, the user may also compose a function with the parent `DM` to facilitate the transfer
  of state variables between the fine and coarse `DM`s.
  In the context of a finite element discretization, an example state variable might be
  values associated with quadrature points within each element.
  A user provided state scatter method is composed via
.vb
  {
    DM dm_fine;
    PetscObjectCompose((PetscObject)dm_fine, "PCTelescopeStateScatter", your_state_scatter_method);
  }
.ve
  The signature of the user provided state scatter method is
.vb
  PetscErrorCode your_state_scatter_method(DM dm_fine, ScatterMode mode, DM dm_coarse);
.ve
  `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
  The user is only required to support mode = `SCATTER_FORWARD`.
  No assumption is made about the data type of the state variables.
  These must be managed by the user and must be accessible from the `DM`.

  Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
  associated with the sub-`KSP` residing within `PCTELESCOPE`.
  In general, `PCTELESCOPE` assumes that the contexts on the fine and coarse `DM` used with
  `KSPSetComputeOperators()` should be "similar" in type or origin.
  Specifically, the following rules are used to infer what the context for the sub-`KSP` should be.

  First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
  Note that the special case of a `DMSHELL` context is queried.

.vb
  DMKSPGetComputeOperators(dm_fine, &dmfine_kspfunc, &dmfine_kspctx);
  DMGetApplicationContext(dm_fine, &dmfine_appctx);
  DMShellGetContext(dm_fine, &dmfine_shellctx);

  DMGetApplicationContext(dm_coarse, &dmcoarse_appctx);
  DMShellGetContext(dm_coarse, &dmcoarse_shellctx);
.ve

  The following rules are then enforced:

  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
  `KSPSetComputeOperators`(sub_ksp, dmfine_kspfunc, NULL);

  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
  check that dmcoarse_appctx is also non-NULL. If this is true, then:
  `KSPSetComputeOperators`(sub_ksp, dmfine_kspfunc, dmcoarse_appctx);

  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
  `KSPSetComputeOperators`(sub_ksp, dmfine_kspfunc, dmcoarse_shellctx);

  If none of the above three tests pass, then `PCTELESCOPE` cannot safely determine what
  context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
  In this case, an additional mechanism is provided via a composed function which will return
  the actual context to be used. To use this feature you must compose the "getter" function
  with the coarse `DM`, e.g.
.vb
  {
    DM dm_coarse;
    PetscObjectCompose((PetscObject)dm_coarse, "PCTelescopeGetCoarseDMKSPContext", your_coarse_context_getter);
  }
.ve
  The signature of the user provided method is
.vb
  PetscErrorCode your_coarse_context_getter(DM dm_coarse, void **your_kspcontext);
.ve
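
  As a sketch, if the required context is simply the application context attached to dm_coarse
  (AppCtx being a hypothetical user-defined type), the getter could be implemented as
.vb
  PetscErrorCode your_coarse_context_getter(DM dm_coarse, void **your_kspcontext)
  {
    AppCtx *ctx;

    PetscFunctionBegin;
    PetscCall(DMGetApplicationContext(dm_coarse, &ctx));
    *your_kspcontext = (void *)ctx;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
.ve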

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be used.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. v - the flag

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore `KSPComputeOperators()`.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore the method (if defined) set via `KSPSetComputeOperators()` on pc

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. subdm - the re-partitioned `DM`

  Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeSetSubcommType - Set the subcommunicator type (interlaced or contiguous).

  Logically Collective

  Input Parameters:
+ pc          - the preconditioner context
- subcommtype - the subcommunicator type (see `PetscSubcommType`)
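
  Note:
  For example, with 4 MPI ranks and a reduction factor of 2, `PETSC_SUBCOMM_INTERLACED`
  results in ranks {0,2} defining the sub-communicator, while `PETSC_SUBCOMM_CONTIGUOUS`
  results in ranks {0,1}. The type may also be selected from the command line via
.vb
  -pc_telescope_subcomm_type contiguous
.ve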

  Level: advanced

.seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous).

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. subcommtype - the subcommunicator type (see `PetscSubcommType`)

  Level: advanced

.seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCTELESCOPE - Runs a `KSP` solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.

   Options Database Keys:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm - flag to indicate whether an attached `DM` should be ignored.
.  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. See `PetscSubcomm` for more information.
.  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-`KSP`.
-  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.

   Level: advanced

   Notes:
   Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner `PC`.
   The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
   sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
   This means there will be MPI ranks which will be idle during the application of this preconditioner.
   Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM`.

   The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.

   There are three setup mechanisms for `PCTELESCOPE`. The features supported by each type are described below.
   In the following, we refer to the operators B and B'; these are the Bmat provided to the `KSP` on the
   communicators c and c' respectively.

   [1] Default setup
   The sub-communicator c' is created via `PetscSubcommCreate()`.
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   No support is provided for `KSPSetComputeOperators()`.
   Currently there is no support for the flag -pc_use_amat.

   [2] `DM` aware setup
   If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
   c' is created via `PetscSubcommCreate()`.
   Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
   Currently only support for re-partitioning a `DMDA` is provided.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   Support is provided for `KSPSetComputeOperators()`. The user provided function and context are propagated to the sub `KSP`.
   This is fragile since the user must ensure that their user context is valid for use on c'.
   Currently there is no support for the flag -pc_use_amat.
   [3] Coarse `DM` setup
   If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
   `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
   The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
   `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
   The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, say as would be
   available when using multigrid on unstructured meshes.
   This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
   The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
   Currently there is no support for the flag -pc_use_amat.
   This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
   Further information about the user-provided methods required by this setup type is given in `PCTelescopeSetUseCoarseDM()`.

   Developer Notes:
   During `PCSetUp()`, the B operator is scattered onto c'.
   Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then, `KSPSolve()` is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
   creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub `KSP` only on
   the ranks within the communicator which have a color equal to zero.

   The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
   In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
   a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c').

   The telescoping preconditioner can re-partition an attached `DM` if it is a `DMDA` (2D or 3D -
   support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
   and this new `DM` is attached to the sub `KSP`. The design of telescope is such that it should be possible to extend support
   for re-partitioning to other `DM`s (e.g. `DMPLEX`). The user can supply a flag to ignore attached `DM`s.
   Alternatively, user-provided re-partitioned `DM`s can be used via -pc_telescope_use_coarse_dm.
   With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.

   When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`.

   Limitations/improvements include the following.
   `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
   A unified mechanism could be provided to query for user contexts as required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()`.

   The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
   `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.

   Mapping of vectors (default setup mode) is performed in the following way.
   Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be ranks 0 and 2.
   We perform the scatter to the sub-communicator in the following way.
   [1] Given a vector x defined on communicator c

.vb
   rank(c)  local values of x
   ------- ----------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
        1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
        3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

   scatter into xtmp defined also on comm c, so that we have the following values

.vb
   rank(c)  local values of xtmp
   ------- ----------------------------------------------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        1   [ ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
        3   [ ]
.ve

   The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.

   [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.

.vb
   rank(c')  local values of xred
   -------- ----------------------------------------------------------------------------
         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
         1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve
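
   As an example of typical usage (a sketch; ./your_app and the solver choices are illustrative only),
   the following options run a solve on 64 ranks while the preconditioner is applied via GAMG on
   64/4 = 16 ranks. Note that the sub-`KSP` is configured using the prefix -telescope_.

.vb
   mpiexec -n 64 ./your_app -ksp_type cg -pc_type telescope -pc_telescope_reduction_factor 4 \
       -telescope_ksp_type preonly -telescope_pc_type gamg
.ve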

   Contributed by Dave May

   Reference:
   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc".
   In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16), 2016. DOI: 10.1145/2929908.2929913

.seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  PetscCall(PetscNew(&sred));
  sred->psubcomm                   = NULL;
  sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm                    = MPI_COMM_NULL;
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm              = PETSC_FALSE;
  pc->data                         = (void *)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
  PetscFunctionReturn(PETSC_SUCCESS);
}