1 2 #include <petsc/private/petscimpl.h> 3 #include <petsc/private/matimpl.h> 4 #include <petsc/private/pcimpl.h> 5 #include <petscksp.h> /*I "petscksp.h" I*/ 6 #include <petscdm.h> /*I "petscdm.h" I*/ 7 #include "../src/ksp/pc/impls/telescope/telescope.h" 8 9 static PetscBool cited = PETSC_FALSE; 10 static const char citation[] = 11 "@inproceedings{MaySananRuppKnepleySmith2016,\n" 12 " title = {Extreme-Scale Multigrid Components within PETSc},\n" 13 " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n" 14 " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n" 15 " series = {PASC '16},\n" 16 " isbn = {978-1-4503-4126-4},\n" 17 " location = {Lausanne, Switzerland},\n" 18 " pages = {5:1--5:12},\n" 19 " articleno = {5},\n" 20 " numpages = {12},\n" 21 " url = {https://doi.acm.org/10.1145/2929908.2929913},\n" 22 " doi = {10.1145/2929908.2929913},\n" 23 " acmid = {2929913},\n" 24 " publisher = {ACM},\n" 25 " address = {New York, NY, USA},\n" 26 " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n" 27 " year = {2016}\n" 28 "}\n"; 29 30 /* 31 default setup mode 32 33 [1a] scatter to (FORWARD) 34 x(comm) -> xtmp(comm) 35 [1b] local copy (to) ranks with color = 0 36 xred(subcomm) <- xtmp 37 38 [2] solve on sub KSP to obtain yred(subcomm) 39 40 [3a] local copy (from) ranks with color = 0 41 yred(subcomm) --> xtmp 42 [2b] scatter from (REVERSE) 43 xtmp(comm) -> y(comm) 44 */ 45 46 /* 47 Collective[comm_f] 48 Notes 49 * Using comm_f = MPI_COMM_NULL will result in an error 50 * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid. 51 * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid. 52 */ 53 PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid) 54 { 55 PetscInt valid = 1; 56 MPI_Group group_f,group_c; 57 PetscErrorCode ierr; 58 PetscMPIInt count,k,size_f = 0,size_c = 0,size_c_sum = 0; 59 int *ranks_f = NULL,*ranks_c = NULL; 60 61 PetscFunctionBegin; 62 if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL"); 63 64 ierr = MPI_Comm_group(comm_f,&group_f);CHKERRQ(ierr); 65 if (comm_c != MPI_COMM_NULL) { 66 ierr = MPI_Comm_group(comm_c,&group_c);CHKERRQ(ierr); 67 } 68 69 ierr = MPI_Comm_size(comm_f,&size_f);CHKERRQ(ierr); 70 if (comm_c != MPI_COMM_NULL) { 71 ierr = MPI_Comm_size(comm_c,&size_c);CHKERRQ(ierr); 72 } 73 74 /* check not all comm_c's are NULL */ 75 size_c_sum = size_c; 76 ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRQ(ierr); 77 if (size_c_sum == 0) { 78 valid = 0; 79 } 80 81 /* check we can map at least 1 rank in comm_c to comm_f */ 82 ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr); 83 ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr); 84 for (k=0; k<size_f; k++) { 85 ranks_f[k] = MPI_UNDEFINED; 86 } 87 for (k=0; k<size_c; k++) { 88 ranks_c[k] = (int)k; 89 } 90 91 /* 92 MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated. 93 I do not want the code to terminate immediately if this occurs, rather I want to throw 94 the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating 95 that comm_c is not a valid sub-communicator. 96 Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks(). 97 */ 98 count = 0; 99 if (comm_c != MPI_COMM_NULL) { 100 (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f); 101 for (k=0; k<size_f; k++) { 102 if (ranks_f[k] == MPI_UNDEFINED) { 103 count++; 104 } 105 } 106 } 107 if (count == size_f) { 108 valid = 0; 109 } 110 111 ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRQ(ierr); 112 if (valid == 1) { *isvalid = PETSC_TRUE; } 113 else { *isvalid = PETSC_FALSE; } 114 115 ierr = PetscFree(ranks_f);CHKERRQ(ierr); 116 ierr = PetscFree(ranks_c);CHKERRQ(ierr); 117 ierr = MPI_Group_free(&group_f);CHKERRQ(ierr); 118 if (comm_c != MPI_COMM_NULL) { 119 ierr = MPI_Group_free(&group_c);CHKERRQ(ierr); 120 } 121 PetscFunctionReturn(0); 122 } 123 124 DM private_PCTelescopeGetSubDM(PC_Telescope sred) 125 { 126 DM subdm = NULL; 127 128 if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; } 129 else { 130 switch (sred->sr_type) { 131 case TELESCOPE_DEFAULT: subdm = NULL; 132 break; 133 case TELESCOPE_DMDA: subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart; 134 break; 135 case TELESCOPE_DMPLEX: subdm = NULL; 136 break; 137 case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); } 138 break; 139 } 140 } 141 return(subdm); 142 } 143 144 PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred) 145 { 146 PetscErrorCode ierr; 147 PetscInt m,M,bs,st,ed; 148 Vec x,xred,yred,xtmp; 149 Mat B; 150 MPI_Comm comm,subcomm; 151 VecScatter scatter; 152 IS isin; 153 154 PetscFunctionBegin; 155 ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr); 156 comm = PetscSubcommParent(sred->psubcomm); 157 subcomm = PetscSubcommChild(sred->psubcomm); 158 159 ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr); 160 ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr); 161 ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr); 162 ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr); 163 164 xred = NULL; 165 m = 0; 166 if (PCTelescope_isActiveRank(sred)) { 167 ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr); 168 ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr); 169 ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr); 170 ierr = VecSetFromOptions(xred);CHKERRQ(ierr); 171 ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr); 172 } 173 174 yred = NULL; 175 if (PCTelescope_isActiveRank(sred)) { 176 ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr); 177 } 178 179 ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr); 180 ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr); 181 ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr); 182 ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr); 183 184 if (PCTelescope_isActiveRank(sred)) { 185 ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr); 186 ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr); 187 } else { 188 ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr); 189 ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr); 190 } 191 ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr); 192 193 ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr); 194 195 sred->isin = isin; 196 sred->scatter = scatter; 197 sred->xred = xred; 198 sred->yred = yred; 199 sred->xtmp = xtmp; 200 ierr = VecDestroy(&x);CHKERRQ(ierr); 201 PetscFunctionReturn(0); 202 } 203 204 PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A) 205 { 206 PetscErrorCode ierr; 207 MPI_Comm comm,subcomm; 208 Mat Bred,B; 209 PetscInt nr,nc; 210 IS isrow,iscol; 211 Mat Blocal,*_Blocal; 212 213 PetscFunctionBegin; 214 ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr); 215 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 216 subcomm = PetscSubcommChild(sred->psubcomm); 217 ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr); 218 ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr); 219 isrow = sred->isin; 220 ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr); 221 ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr); 222 Blocal = *_Blocal; 223 ierr = PetscFree(_Blocal);CHKERRQ(ierr); 224 Bred = NULL; 225 if (PCTelescope_isActiveRank(sred)) { 226 PetscInt mm; 227 228 if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; } 229 230 ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr); 231 ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr); 232 } 233 *A = Bred; 234 ierr = ISDestroy(&iscol);CHKERRQ(ierr); 235 ierr = MatDestroy(&Blocal);CHKERRQ(ierr); 236 PetscFunctionReturn(0); 237 } 238 239 static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace) 240 { 241 PetscErrorCode ierr; 242 PetscBool has_const; 243 const Vec *vecs; 244 Vec *sub_vecs = NULL; 245 PetscInt i,k,n = 0; 246 MPI_Comm subcomm; 247 248 PetscFunctionBegin; 249 subcomm = PetscSubcommChild(sred->psubcomm); 250 ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr); 251 252 if (PCTelescope_isActiveRank(sred)) { 253 if (n) { 254 ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr); 255 } 256 } 257 258 /* copy entries */ 259 for (k=0; k<n; k++) { 260 const PetscScalar *x_array; 261 PetscScalar *LA_sub_vec; 262 PetscInt st,ed; 263 264 /* pull in vector x->xtmp */ 265 ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 266 ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 267 if (sub_vecs) { 268 /* copy vector entries into xred */ 269 ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr); 270 if (sub_vecs[k]) { 271 ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr); 272 ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr); 273 for (i=0; i<ed-st; i++) { 274 LA_sub_vec[i] = x_array[i]; 275 } 276 ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr); 277 } 278 ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr); 279 } 280 } 281 282 if (PCTelescope_isActiveRank(sred)) { 283 /* create new (near) nullspace for redundant object */ 284 ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr); 285 ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr); 286 if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope"); 287 if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope"); 288 } 289 PetscFunctionReturn(0); 290 } 291 292 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat) 293 { 294 PetscErrorCode ierr; 295 Mat B; 296 297 PetscFunctionBegin; 298 ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr); 299 /* Propagate the nullspace if it exists */ 300 { 301 MatNullSpace nullspace,sub_nullspace; 302 ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr); 303 if (nullspace) { 304 ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr); 305 ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr); 306 if (PCTelescope_isActiveRank(sred)) { 307 ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr); 308 ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr); 309 } 310 } 311 } 312 /* Propagate the near nullspace if it exists */ 313 { 314 MatNullSpace nearnullspace,sub_nearnullspace; 315 ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr); 316 if (nearnullspace) { 317 ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr); 318 ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr); 319 if (PCTelescope_isActiveRank(sred)) { 320 ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr); 321 ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr); 322 } 323 } 324 } 325 PetscFunctionReturn(0); 326 } 327 328 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer) 329 { 330 PC_Telescope sred = (PC_Telescope)pc->data; 331 PetscErrorCode ierr; 332 PetscBool iascii,isstring; 333 PetscViewer subviewer; 334 335 PetscFunctionBegin; 336 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 337 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 338 if (iascii) { 339 { 340 MPI_Comm comm,subcomm; 341 PetscMPIInt comm_size,subcomm_size; 342 DM dm = NULL,subdm = NULL; 343 344 ierr = PCGetDM(pc,&dm);CHKERRQ(ierr); 345 subdm = private_PCTelescopeGetSubDM(sred); 346 347 if (sred->psubcomm) { 348 comm = PetscSubcommParent(sred->psubcomm); 349 subcomm = PetscSubcommChild(sred->psubcomm); 350 ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr); 351 ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr); 352 353 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 354 ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr); 355 ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr); 356 switch (sred->subcommtype) { 357 case PETSC_SUBCOMM_INTERLACED : 358 ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr); 359 break; 360 case PETSC_SUBCOMM_CONTIGUOUS : 361 ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr); 362 break; 363 default : 364 SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope"); 365 } 366 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 367 } else { 368 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 369 subcomm = sred->subcomm; 370 if (!PCTelescope_isActiveRank(sred)) { 371 subcomm = PETSC_COMM_SELF; 372 } 373 374 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 375 ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr); 376 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 377 } 378 379 ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr); 380 if (PCTelescope_isActiveRank(sred)) { 381 ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr); 382 383 if (dm && sred->ignore_dm) { 384 ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr); 385 } 386 if (sred->ignore_kspcomputeoperators) { 387 ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr); 388 } 389 switch (sred->sr_type) { 390 case TELESCOPE_DEFAULT: 391 ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr); 392 break; 393 case TELESCOPE_DMDA: 394 ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr); 395 ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr); 396 break; 397 case TELESCOPE_DMPLEX: 398 ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr); 399 break; 400 case TELESCOPE_COARSEDM: 401 ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr); 402 break; 403 } 404 405 if (dm) { 406 PetscObject obj = (PetscObject)dm; 407 ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr); 408 PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE); 409 if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); } 410 if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); } 411 if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); } 412 ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr); 413 PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE); 414 } else { 415 ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr); 416 } 417 if (subdm) { 418 PetscObject obj = (PetscObject)subdm; 419 ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr); 420 PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE); 421 if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); } 422 if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); } 423 if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); } 424 ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr); 425 PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE); 426 } else { 427 ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr); 428 } 429 430 ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr); 431 ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr); 432 } 433 ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr); 434 } 435 } 436 PetscFunctionReturn(0); 437 } 438 439 static PetscErrorCode PCSetUp_Telescope(PC pc) 440 { 441 PC_Telescope sred = (PC_Telescope)pc->data; 442 PetscErrorCode ierr; 443 MPI_Comm comm,subcomm=0; 444 PCTelescopeType sr_type; 445 446 PetscFunctionBegin; 447 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 448 449 /* Determine type of setup/update */ 450 if (!pc->setupcalled) { 451 PetscBool has_dm,same; 452 DM dm; 453 454 sr_type = TELESCOPE_DEFAULT; 455 has_dm = PETSC_FALSE; 456 ierr = PCGetDM(pc,&dm);CHKERRQ(ierr); 457 if (dm) { has_dm = PETSC_TRUE; } 458 if (has_dm) { 459 /* check for dmda */ 460 ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr); 461 if (same) { 462 ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr); 463 sr_type = TELESCOPE_DMDA; 464 } 465 /* check for dmplex */ 466 ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr); 467 if (same) { 468 ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr); 469 sr_type = TELESCOPE_DMPLEX; 470 } 471 472 if (sred->use_coarse_dm) { 473 ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr); 474 sr_type = TELESCOPE_COARSEDM; 475 } 476 477 if (sred->ignore_dm) { 478 ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr); 479 sr_type = TELESCOPE_DEFAULT; 480 } 481 } 482 sred->sr_type = sr_type; 483 } else { 484 sr_type = sred->sr_type; 485 } 486 487 /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */ 488 switch (sr_type) { 489 case TELESCOPE_DEFAULT: 490 sred->pctelescope_setup_type = PCTelescopeSetUp_default; 491 sred->pctelescope_matcreate_type = PCTelescopeMatCreate_default; 492 sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default; 493 sred->pctelescope_reset_type = NULL; 494 break; 495 case TELESCOPE_DMDA: 496 pc->ops->apply = PCApply_Telescope_dmda; 497 pc->ops->applyrichardson = PCApplyRichardson_Telescope_dmda; 498 sred->pctelescope_setup_type = PCTelescopeSetUp_dmda; 499 sred->pctelescope_matcreate_type = PCTelescopeMatCreate_dmda; 500 sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda; 501 sred->pctelescope_reset_type = PCReset_Telescope_dmda; 502 break; 503 case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available"); 504 break; 505 case TELESCOPE_COARSEDM: 506 pc->ops->apply = PCApply_Telescope_CoarseDM; 507 pc->ops->applyrichardson = PCApplyRichardson_Telescope_CoarseDM; 508 sred->pctelescope_setup_type = PCTelescopeSetUp_CoarseDM; 509 sred->pctelescope_matcreate_type = NULL; 510 sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */ 511 sred->pctelescope_reset_type = PCReset_Telescope_CoarseDM; 512 break; 513 default: SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM"); 514 break; 515 } 516 517 /* subcomm definition */ 518 if (!pc->setupcalled) { 519 if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) { 520 if (!sred->psubcomm) { 521 ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr); 522 ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr); 523 ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr); 524 ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr); 525 sred->subcomm = PetscSubcommChild(sred->psubcomm); 526 } 527 } else { /* query PC for DM, check communicators */ 528 DM dm,dm_coarse_partition = NULL; 529 MPI_Comm comm_fine,comm_coarse_partition = MPI_COMM_NULL; 530 PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0; 531 PetscBool isvalidsubcomm; 532 533 ierr = PCGetDM(pc,&dm);CHKERRQ(ierr); 534 comm_fine = PetscObjectComm((PetscObject)dm); 535 ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr); 536 if (dm_coarse_partition) { cnt = 1; } 537 ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRQ(ierr); 538 if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found"); 539 540 ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRQ(ierr); 541 if (dm_coarse_partition) { 542 comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition); 543 ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRQ(ierr); 544 } 545 546 cs[0] = csize_fine; 547 cs[1] = csize_coarse_partition; 548 ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRQ(ierr); 549 if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC"); 550 551 ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr); 552 if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm"); 553 sred->subcomm = comm_coarse_partition; 554 } 555 } 556 subcomm = sred->subcomm; 557 558 /* internal KSP */ 559 if (!pc->setupcalled) { 560 const char *prefix; 561 562 if (PCTelescope_isActiveRank(sred)) { 563 ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr); 564 ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr); 565 ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr); 566 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr); 567 ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr); 568 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 569 ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr); 570 ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr); 571 } 572 } 573 574 /* setup */ 575 if (!pc->setupcalled && sred->pctelescope_setup_type) { 576 ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr); 577 } 578 /* update */ 579 if (!pc->setupcalled) { 580 if (sred->pctelescope_matcreate_type) { 581 ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr); 582 } 583 if (sred->pctelescope_matnullspacecreate_type) { 584 ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr); 585 } 586 } else { 587 if (sred->pctelescope_matcreate_type) { 588 ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr); 589 } 590 } 591 592 /* common - no construction */ 593 if (PCTelescope_isActiveRank(sred)) { 594 ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr); 595 if (pc->setfromoptionscalled && !pc->setupcalled){ 596 ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr); 597 } 598 } 599 PetscFunctionReturn(0); 600 } 601 602 static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y) 603 { 604 PC_Telescope sred = (PC_Telescope)pc->data; 605 PetscErrorCode ierr; 606 Vec xtmp,xred,yred; 607 PetscInt i,st,ed; 608 VecScatter scatter; 609 PetscScalar *array; 610 const PetscScalar *x_array; 611 612 PetscFunctionBegin; 613 ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr); 614 615 xtmp = sred->xtmp; 616 scatter = sred->scatter; 617 xred = sred->xred; 618 yred = sred->yred; 619 620 /* pull in vector x->xtmp */ 621 ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 622 ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 623 624 /* copy vector entries into xred */ 625 ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr); 626 if (xred) { 627 PetscScalar *LA_xred; 628 ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr); 629 ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr); 630 for (i=0; i<ed-st; i++) { 631 LA_xred[i] = x_array[i]; 632 } 633 ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr); 634 } 635 ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr); 636 /* solve */ 637 if (PCTelescope_isActiveRank(sred)) { 638 ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr); 639 ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr); 640 } 641 /* return vector */ 642 ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr); 643 if (yred) { 644 const PetscScalar *LA_yred; 645 ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr); 646 ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr); 647 for (i=0; i<ed-st; i++) { 648 array[i] = LA_yred[i]; 649 } 650 ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr); 651 } 652 ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr); 653 ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 654 ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 655 PetscFunctionReturn(0); 656 } 657 658 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason) 659 { 660 PC_Telescope sred = (PC_Telescope)pc->data; 661 PetscErrorCode ierr; 662 Vec xtmp,yred; 663 PetscInt i,st,ed; 664 VecScatter scatter; 665 const PetscScalar *x_array; 666 PetscBool default_init_guess_value; 667 668 PetscFunctionBegin; 669 xtmp = sred->xtmp; 670 scatter = sred->scatter; 671 yred = sred->yred; 672 673 if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1"); 674 *reason = (PCRichardsonConvergedReason)0; 675 676 if (!zeroguess) { 677 ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr); 678 /* pull in vector y->xtmp */ 679 ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 680 ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 681 682 /* copy vector entries into xred */ 683 ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr); 684 if (yred) { 685 PetscScalar *LA_yred; 686 ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr); 687 ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr); 688 for (i=0; i<ed-st; i++) { 689 LA_yred[i] = x_array[i]; 690 } 691 ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr); 692 } 693 ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr); 694 } 695 696 if (PCTelescope_isActiveRank(sred)) { 697 ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr); 698 if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr); 699 } 700 701 ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr); 702 703 if (PCTelescope_isActiveRank(sred)) { 704 ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr); 705 } 706 707 if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS; 708 *outits = 1; 709 PetscFunctionReturn(0); 710 } 711 712 static PetscErrorCode PCReset_Telescope(PC pc) 713 { 714 PC_Telescope sred = (PC_Telescope)pc->data; 715 PetscErrorCode ierr; 716 717 ierr = ISDestroy(&sred->isin);CHKERRQ(ierr); 718 ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr); 719 ierr = VecDestroy(&sred->xred);CHKERRQ(ierr); 720 ierr = VecDestroy(&sred->yred);CHKERRQ(ierr); 721 ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr); 722 ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr); 723 ierr = KSPReset(sred->ksp);CHKERRQ(ierr); 724 if (sred->pctelescope_reset_type) { 725 ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr); 726 } 727 PetscFunctionReturn(0); 728 } 729 730 static PetscErrorCode PCDestroy_Telescope(PC pc) 731 { 732 PC_Telescope sred = (PC_Telescope)pc->data; 733 PetscErrorCode ierr; 734 735 PetscFunctionBegin; 736 ierr = PCReset_Telescope(pc);CHKERRQ(ierr); 737 ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr); 738 ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr); 739 ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr); 740 ierr = PetscFree(pc->data);CHKERRQ(ierr); 741 PetscFunctionReturn(0); 742 } 743 744 static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc) 745 { 746 PC_Telescope sred = (PC_Telescope)pc->data; 747 PetscErrorCode ierr; 748 MPI_Comm comm; 749 PetscMPIInt size; 750 PetscBool flg; 751 PetscSubcommType subcommtype; 752 753 PetscFunctionBegin; 754 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 755 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 756 ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr); 757 ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr); 758 if (flg) { 759 ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr); 760 } 761 ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr); 762 if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size"); 763 ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr); 764 ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr); 765 ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,0);CHKERRQ(ierr); 766 ierr = PetscOptionsTail();CHKERRQ(ierr); 767 PetscFunctionReturn(0); 768 } 769 770 /* PC simplementation specific API's */ 771 772 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp) 773 { 774 PC_Telescope red = (PC_Telescope)pc->data; 775 PetscFunctionBegin; 776 if (ksp) *ksp = red->ksp; 777 PetscFunctionReturn(0); 778 } 779 780 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype) 781 { 782 PC_Telescope red = (PC_Telescope)pc->data; 783 PetscFunctionBegin; 784 if (subcommtype) *subcommtype = red->subcommtype; 785 PetscFunctionReturn(0); 786 } 787 788 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype) 789 { 790 PC_Telescope red = (PC_Telescope)pc->data; 791 792 PetscFunctionBegin; 793 if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up."); 794 red->subcommtype = subcommtype; 795 PetscFunctionReturn(0); 796 } 797 798 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact) 799 { 800 PC_Telescope red = (PC_Telescope)pc->data; 801 PetscFunctionBegin; 802 if (fact) *fact = red->redfactor; 803 PetscFunctionReturn(0); 804 } 805 806 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact) 807 { 808 PC_Telescope red = (PC_Telescope)pc->data; 809 PetscMPIInt size; 810 PetscErrorCode ierr; 811 812 PetscFunctionBegin; 813 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 814 if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact); 815 if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact); 816 red->redfactor = fact; 817 PetscFunctionReturn(0); 818 } 819 820 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v) 821 { 822 PC_Telescope red = (PC_Telescope)pc->data; 823 PetscFunctionBegin; 824 if (v) *v = red->ignore_dm; 825 PetscFunctionReturn(0); 826 } 827 828 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v) 829 { 830 PC_Telescope red = (PC_Telescope)pc->data; 831 PetscFunctionBegin; 832 red->ignore_dm = v; 833 PetscFunctionReturn(0); 834 } 835 836 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v) 837 { 838 PC_Telescope red = (PC_Telescope)pc->data; 839 PetscFunctionBegin; 840 if (v) *v = red->use_coarse_dm; 841 PetscFunctionReturn(0); 842 } 843 844 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v) 845 { 846 PC_Telescope red = (PC_Telescope)pc->data; 847 PetscFunctionBegin; 848 red->use_coarse_dm = v; 849 PetscFunctionReturn(0); 850 } 851 852 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v) 853 { 854 PC_Telescope red = (PC_Telescope)pc->data; 855 PetscFunctionBegin; 856 if (v) *v = red->ignore_kspcomputeoperators; 857 PetscFunctionReturn(0); 858 } 859 860 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v) 861 { 862 PC_Telescope red = (PC_Telescope)pc->data; 863 PetscFunctionBegin; 864 red->ignore_kspcomputeoperators = v; 865 PetscFunctionReturn(0); 866 } 867 868 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm) 869 { 870 PC_Telescope red = (PC_Telescope)pc->data; 871 PetscFunctionBegin; 872 *dm = private_PCTelescopeGetSubDM(red); 873 PetscFunctionReturn(0); 874 } 875 876 /*@ 877 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC. 878 879 Not Collective 880 881 Input Parameter: 882 . pc - the preconditioner context 883 884 Output Parameter: 885 . subksp - the KSP defined the smaller set of processes 886 887 Level: advanced 888 889 @*/ 890 PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp) 891 { 892 PetscErrorCode ierr; 893 PetscFunctionBegin; 894 ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr); 895 PetscFunctionReturn(0); 896 } 897 898 /*@ 899 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced by. 900 901 Not Collective 902 903 Input Parameter: 904 . pc - the preconditioner context 905 906 Output Parameter: 907 . fact - the reduction factor 908 909 Level: advanced 910 911 @*/ 912 PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact) 913 { 914 PetscErrorCode ierr; 915 PetscFunctionBegin; 916 ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr); 917 PetscFunctionReturn(0); 918 } 919 920 /*@ 921 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes has been reduced by. 922 923 Not Collective 924 925 Input Parameter: 926 . pc - the preconditioner context 927 928 Output Parameter: 929 . fact - the reduction factor 930 931 Level: advanced 932 933 @*/ 934 PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact) 935 { 936 PetscErrorCode ierr; 937 PetscFunctionBegin; 938 ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr); 939 PetscFunctionReturn(0); 940 } 941 942 /*@ 943 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used. 944 945 Not Collective 946 947 Input Parameter: 948 . pc - the preconditioner context 949 950 Output Parameter: 951 . v - the flag 952 953 Level: advanced 954 955 @*/ 956 PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v) 957 { 958 PetscErrorCode ierr; 959 PetscFunctionBegin; 960 ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr); 961 PetscFunctionReturn(0); 962 } 963 964 /*@ 965 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC. 966 967 Not Collective 968 969 Input Parameter: 970 . pc - the preconditioner context 971 972 Output Parameter: 973 . v - Use PETSC_TRUE to ignore any DM 974 975 Level: advanced 976 977 @*/ 978 PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v) 979 { 980 PetscErrorCode ierr; 981 PetscFunctionBegin; 982 ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr); 983 PetscFunctionReturn(0); 984 } 985 986 /*@ 987 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used. 988 989 Not Collective 990 991 Input Parameter: 992 . pc - the preconditioner context 993 994 Output Parameter: 995 . v - the flag 996 997 Level: advanced 998 999 @*/ 1000 PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v) 1001 { 1002 PetscErrorCode ierr; 1003 PetscFunctionBegin; 1004 ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr); 1005 PetscFunctionReturn(0); 1006 } 1007 1008 /*@ 1009 PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM 1010 1011 Not Collective 1012 1013 Input Parameter: 1014 . pc - the preconditioner context 1015 1016 Output Parameter: 1017 . v - Use PETSC_FALSE to ignore any coarse DM 1018 1019 Notes: 1020 When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope 1021 will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and 1022 -pc_telescope_subcomm_type will no longer have any meaning. 1023 It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes. 1024 An error will occur of the size of the communicator associated with the coarse DM 1025 is the same as that of the parent DM. 1026 Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent. 1027 This will be checked at the time the preconditioner is setup and an error will occur if 1028 the coarse DM does not define a sub-communicator of that used by the parent DM. 1029 1030 The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of 1031 the DM used (e.g. it supports DMSHELL, DMPLEX, etc). 1032 1033 Support is currently only provided for the case when you are using KSPSetComputeOperators() 1034 1035 The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs. 1036 In the user code, this is achieved via 1037 .vb 1038 { 1039 DM dm_fine; 1040 PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method); 1041 } 1042 .ve 1043 The signature of the user provided field scatter method is 1044 .vb 1045 PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse); 1046 .ve 1047 The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE. 1048 SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM. 1049 1050 Optionally, the user may also compose a function with the parent DM to facilitate the transfer 1051 of state variables between the fine and coarse DMs. 1052 In the context of a finite element discretization, an example state variable might be 1053 values associated with quadrature points within each element. 1054 A user provided state scatter method is composed via 1055 .vb 1056 { 1057 DM dm_fine; 1058 PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method); 1059 } 1060 .ve 1061 The signature of the user provided state scatter method is 1062 .vb 1063 PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse); 1064 .ve 1065 SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM. 1066 The user is only required to support mode = SCATTER_FORWARD. 1067 No assumption is made about the data type of the state variables. 1068 These must be managed by the user and must be accessible from the DM. 1069 1070 Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be 1071 associated with the sub-KSP residing within PCTelescope. 1072 In general, PCTelescope assumes that the context on the fine and coarse DM used with 1073 KSPSetComputeOperators() should be "similar" in type or origin. 1074 Specifically the following rules are used to infer what context on the sub-KSP should be. 1075 1076 First the contexts from the KSP and the fine and coarse DMs are retrieved. 1077 Note that the special case of a DMSHELL context is queried. 1078 1079 .vb 1080 DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx); 1081 DMGetApplicationContext(dm_fine,&dmfine_appctx); 1082 DMShellGetContext(dm_fine,&dmfine_shellctx); 1083 1084 DMGetApplicationContext(dm_coarse,&dmcoarse_appctx); 1085 DMShellGetContext(dm_coarse,&dmcoarse_shellctx); 1086 .ve 1087 1088 The following rules are then enforced: 1089 1090 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP: 1091 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL); 1092 1093 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx, 1094 check that dmcoarse_appctx is also non-NULL. If this is true, then: 1095 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx); 1096 1097 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx, 1098 check that dmcoarse_shellctx is also non-NULL. If this is true, then: 1099 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx); 1100 1101 If neither of the above three tests passed, then PCTelescope cannot safely determine what 1102 context should be provided to KSPSetComputeOperators() for use with the sub-KSP. 1103 In this case, an additional mechanism is provided via a composed function which will return 1104 the actual context to be used. To use this feature you must compose the "getter" function 1105 with the coarse DM, e.g. 1106 .vb 1107 { 1108 DM dm_coarse; 1109 PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter); 1110 } 1111 .ve 1112 The signature of the user provided method is 1113 .vb 1114 PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext); 1115 .ve 1116 1117 Level: advanced 1118 1119 @*/ 1120 PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v) 1121 { 1122 PetscErrorCode ierr; 1123 PetscFunctionBegin; 1124 ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr); 1125 PetscFunctionReturn(0); 1126 } 1127 1128 /*@ 1129 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used. 1130 1131 Not Collective 1132 1133 Input Parameter: 1134 . pc - the preconditioner context 1135 1136 Output Parameter: 1137 . v - the flag 1138 1139 Level: advanced 1140 1141 @*/ 1142 PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v) 1143 { 1144 PetscErrorCode ierr; 1145 PetscFunctionBegin; 1146 ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr); 1147 PetscFunctionReturn(0); 1148 } 1149 1150 /*@ 1151 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators. 1152 1153 Not Collective 1154 1155 Input Parameter: 1156 . pc - the preconditioner context 1157 1158 Output Parameter: 1159 . v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc 1160 1161 Level: advanced 1162 1163 @*/ 1164 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v) 1165 { 1166 PetscErrorCode ierr; 1167 PetscFunctionBegin; 1168 ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr); 1169 PetscFunctionReturn(0); 1170 } 1171 1172 /*@ 1173 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP. 1174 1175 Not Collective 1176 1177 Input Parameter: 1178 . pc - the preconditioner context 1179 1180 Output Parameter: 1181 . subdm - The re-partitioned DM 1182 1183 Level: advanced 1184 1185 @*/ 1186 PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm) 1187 { 1188 PetscErrorCode ierr; 1189 PetscFunctionBegin; 1190 ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr); 1191 PetscFunctionReturn(0); 1192 } 1193 1194 /*@ 1195 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous) 1196 1197 Logically Collective 1198 1199 Input Parameter: 1200 + pc - the preconditioner context 1201 - subcommtype - the subcommunicator type (see PetscSubcommType) 1202 1203 Level: advanced 1204 1205 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE 1206 @*/ 1207 PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype) 1208 { 1209 PetscErrorCode ierr; 1210 PetscFunctionBegin; 1211 ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr); 1212 PetscFunctionReturn(0); 1213 } 1214 1215 /*@ 1216 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous) 1217 1218 Not Collective 1219 1220 Input Parameter: 1221 . pc - the preconditioner context 1222 1223 Output Parameter: 1224 . subcommtype - the subcommunicator type (see PetscSubcommType) 1225 1226 Level: advanced 1227 1228 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE 1229 @*/ 1230 PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype) 1231 { 1232 PetscErrorCode ierr; 1233 PetscFunctionBegin; 1234 ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr); 1235 PetscFunctionReturn(0); 1236 } 1237 1238 /* -------------------------------------------------------------------------------------*/ 1239 /*MC 1240 PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve. 1241 1242 Options Database: 1243 + -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks. 1244 . -pc_telescope_ignore_dm - flag to indicate whether an attached DM should be ignored. 1245 . -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information. 1246 . -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP. 1247 - -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator. 1248 1249 Level: advanced 1250 1251 Notes: 1252 Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation 1253 creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC). 1254 The preconditioner is deemed telescopic as it only calls KSPSolve() on a single 1255 sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators. 1256 This means there will be MPI ranks which will be idle during the application of this preconditioner. 1257 Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM. 1258 1259 The default type of the sub KSP (the KSP defined on c') is PREONLY. 1260 1261 There are three setup mechanisms for PCTelescope. Features support by each type are described below. 1262 In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the 1263 communicators c and c' respectively. 1264 1265 [1] Default setup 1266 The sub-communicator c' is created via PetscSubcommCreate(). 1267 Explicitly defined nullspace and near nullspace vectors will be propogated from B to B'. 1268 Currently there is no support define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()). 1269 No support is provided for KSPSetComputeOperators(). 1270 Currently there is no support for the flag -pc_use_amat. 1271 1272 [2] DM aware setup 1273 If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'. 1274 c' is created via PetscSubcommCreate(). 1275 Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM. 1276 Currently only support for re-partitioning a DMDA is provided. 1277 Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B'). 1278 Currently there is no support define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()). 1279 Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP. 1280 This is fragile since the user must ensure that their user context is valid for use on c'. 1281 Currently there is no support for the flag -pc_use_amat. 1282 1283 [3] Coarse DM setup 1284 If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM(). 1285 PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c. 1286 The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE. 1287 PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported. 1288 The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be 1289 available with using multi-grid on unstructured meshes. 1290 This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type. 1291 Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'. 1292 Currently there is no support define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()). 1293 There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported. 1294 The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse. 1295 Propogation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators(). 1296 Currently there is no support for the flag -pc_use_amat. 1297 This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE); 1298 Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM(). 1299 1300 Developer Notes: 1301 During PCSetup, the B operator is scattered onto c'. 1302 Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c'). 1303 Then, KSPSolve() is executed on the c' communicator. 1304 1305 The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED 1306 creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero. 1307 1308 The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator. 1309 In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into 1310 a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c') 1311 1312 The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D - 1313 support for 1D DMDAs is not provided). If a DMDA is found, a topolgically equivalent DMDA is created on c' 1314 and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support 1315 for re-partitioning other to DM's (e.g. DMPLEX). The user can supply a flag to ignore attached DMs. 1316 Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm. 1317 1318 With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'. 1319 1320 When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B 1321 into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the 1322 locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat() 1323 1324 Limitations/improvements include the following. 1325 VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage. 1326 A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction(). 1327 1328 The symmetric permutation used when a DMDA is encountered is performed via explicitly assmbleming a permutation matrix P, 1329 and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears 1330 VecPermute() does not supported for the use case required here. By computing P, one can permute both the operator and RHS in a 1331 consistent manner. 1332 1333 Mapping of vectors (default setup mode) is performed in the following way. 1334 Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2. 1335 Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2. 1336 We perform the scatter to the sub-communicator in the following way. 1337 [1] Given a vector x defined on communicator c 1338 1339 .vb 1340 rank(c) local values of x 1341 ------- ---------------------------------------- 1342 0 [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0 ] 1343 1 [ 6.0, 7.0, 8.0, 9.0, 10.0, 11.0 ] 1344 2 [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ] 1345 3 [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ] 1346 .ve 1347 1348 scatter into xtmp defined also on comm c, so that we have the following values 1349 1350 .vb 1351 rank(c) local values of xtmp 1352 ------- ---------------------------------------------------------------------------- 1353 0 [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0 ] 1354 1 [ ] 1355 2 [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ] 1356 3 [ ] 1357 .ve 1358 1359 The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values 1360 1361 1362 [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'. 1363 Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0. 1364 1365 .vb 1366 rank(c') local values of xred 1367 -------- ---------------------------------------------------------------------------- 1368 0 [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0 ] 1369 1 [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ] 1370 .ve 1371 1372 Contributed by Dave May 1373 1374 Reference: 1375 Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913 1376 1377 .seealso: PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT 1378 M*/ 1379 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc) 1380 { 1381 PetscErrorCode ierr; 1382 struct _PC_Telescope *sred; 1383 1384 PetscFunctionBegin; 1385 ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr); 1386 sred->psubcomm = NULL; 1387 sred->subcommtype = PETSC_SUBCOMM_INTERLACED; 1388 sred->subcomm = MPI_COMM_NULL; 1389 sred->redfactor = 1; 1390 sred->ignore_dm = PETSC_FALSE; 1391 sred->ignore_kspcomputeoperators = PETSC_FALSE; 1392 sred->use_coarse_dm = PETSC_FALSE; 1393 pc->data = (void*)sred; 1394 1395 pc->ops->apply = PCApply_Telescope; 1396 pc->ops->applytranspose = NULL; 1397 pc->ops->applyrichardson = PCApplyRichardson_Telescope; 1398 pc->ops->setup = PCSetUp_Telescope; 1399 pc->ops->destroy = PCDestroy_Telescope; 1400 pc->ops->reset = PCReset_Telescope; 1401 pc->ops->setfromoptions = PCSetFromOptions_Telescope; 1402 pc->ops->view = PCView_Telescope; 1403 1404 sred->pctelescope_setup_type = PCTelescopeSetUp_default; 1405 sred->pctelescope_matcreate_type = PCTelescopeMatCreate_default; 1406 sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default; 1407 sred->pctelescope_reset_type = NULL; 1408 1409 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr); 1410 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr); 1411 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr); 1412 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr); 1413 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr); 1414 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr); 1415 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr); 1416 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr); 1417 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr); 1418 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr); 1419 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr); 1420 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr); 1421 PetscFunctionReturn(0); 1422 } 1423