1 /* TODOLIST 2 DofSplitting and DM attached to pc? 3 Change SetNeumannBoundaries to SetNeumannBoundariesLocal and provide new SetNeumannBoundaries (same Dirichlet) 4 change how to deal with the coarse problem (PCBDDCSetCoarseEnvironment): 5 - simplify coarse problem structure -> PCBDDC or PCREDUDANT, nothing else -> same comm for all levels? 6 - remove coarse enums and allow use of PCBDDCGetCoarseKSP 7 - remove metis dependency -> use MatPartitioning for multilevel -> Assemble serial adjacency in PCBDDCAnalyzeInterface? 8 code refactoring: 9 - pick up better names for static functions 10 change options structure: 11 - insert BDDC into MG framework? 12 provide other ops? Ask to developers 13 remove all unused printf 14 man pages 15 */ 16 17 /* ---------------------------------------------------------------------------------------------------------------------------------------------- 18 Implementation of BDDC preconditioner based on: 19 C. Dohrmann "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007 20 ---------------------------------------------------------------------------------------------------------------------------------------------- */ 21 22 #include "bddc.h" /*I "petscpc.h" I*/ /* includes for fortran wrappers */ 23 #include "bddcprivate.h" 24 #include <petscblaslapack.h> 25 26 /* prototypes for static functions contained in bddc.c */ 27 static PetscErrorCode PCBDDCSetUseExactDirichlet(PC,PetscBool); 28 static PetscErrorCode PCBDDCSetLevel(PC,PetscInt); 29 static PetscErrorCode PCBDDCCoarseSetUp(PC); 30 static PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC,PetscScalar*); 31 32 /* -------------------------------------------------------------------------- */ 33 #undef __FUNCT__ 34 #define __FUNCT__ "PCSetFromOptions_BDDC" 35 PetscErrorCode PCSetFromOptions_BDDC(PC pc) 36 { 37 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 38 PetscErrorCode ierr; 39 40 PetscFunctionBegin; 41 ierr = 
PetscOptionsHead("BDDC options");CHKERRQ(ierr); 42 /* Verbose debugging of main data structures */ 43 ierr = PetscOptionsInt("-pc_bddc_check_level" ,"Verbose (debugging) output for PCBDDC" ,"none",pcbddc->dbg_flag ,&pcbddc->dbg_flag ,NULL);CHKERRQ(ierr); 44 /* Some customization for default primal space */ 45 ierr = PetscOptionsBool("-pc_bddc_vertices_only" ,"Use only vertices in coarse space (i.e. discard constraints)","none",pcbddc->vertices_flag ,&pcbddc->vertices_flag ,NULL);CHKERRQ(ierr); 46 ierr = PetscOptionsBool("-pc_bddc_constraints_only","Use only constraints in coarse space (i.e. discard vertices)","none",pcbddc->constraints_flag,&pcbddc->constraints_flag,NULL);CHKERRQ(ierr); 47 ierr = PetscOptionsBool("-pc_bddc_faces_only" ,"Use only faces among constraints of coarse space (i.e. discard edges)" ,"none",pcbddc->faces_flag ,&pcbddc->faces_flag ,NULL);CHKERRQ(ierr); 48 ierr = PetscOptionsBool("-pc_bddc_edges_only" ,"Use only edges among constraints of coarse space (i.e. discard faces)" ,"none",pcbddc->edges_flag ,&pcbddc->edges_flag ,NULL);CHKERRQ(ierr); 49 /* Coarse solver context */ 50 static const char * const avail_coarse_problems[] = {"sequential","replicated","parallel","multilevel","CoarseProblemType","PC_BDDC_",0}; /*order of choiches depends on ENUM defined in bddc.h */ 51 ierr = PetscOptionsEnum("-pc_bddc_coarse_problem_type","Set coarse problem type","none",avail_coarse_problems,(PetscEnum)pcbddc->coarse_problem_type,(PetscEnum*)&pcbddc->coarse_problem_type,NULL);CHKERRQ(ierr); 52 /* Two different application of BDDC to the whole set of dofs, internal and interface */ 53 ierr = PetscOptionsBool("-pc_bddc_switch_preconditioning_type","Switch between M_2 (default) and M_3 preconditioners (as defined by Dohrmann)","none",pcbddc->inexact_prec_type,&pcbddc->inexact_prec_type,NULL);CHKERRQ(ierr); 54 ierr = PetscOptionsBool("-pc_bddc_use_change_of_basis","Use change of basis approach for primal 
space","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);CHKERRQ(ierr); 55 ierr = PetscOptionsBool("-pc_bddc_use_change_on_faces","Use change of basis approach for face constraints","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);CHKERRQ(ierr); 56 if (!pcbddc->use_change_of_basis) { 57 pcbddc->use_change_on_faces = PETSC_FALSE; 58 } 59 ierr = PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);CHKERRQ(ierr); 60 ierr = PetscOptionsInt("-pc_bddc_max_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);CHKERRQ(ierr); 61 ierr = PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);CHKERRQ(ierr); 62 ierr = PetscOptionsTail();CHKERRQ(ierr); 63 PetscFunctionReturn(0); 64 } 65 /* -------------------------------------------------------------------------- */ 66 #undef __FUNCT__ 67 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS_BDDC" 68 static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices) 69 { 70 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 71 PetscErrorCode ierr; 72 73 PetscFunctionBegin; 74 ierr = ISDestroy(&pcbddc->user_primal_vertices);CHKERRQ(ierr); 75 ierr = PetscObjectReference((PetscObject)PrimalVertices);CHKERRQ(ierr); 76 pcbddc->user_primal_vertices = PrimalVertices; 77 PetscFunctionReturn(0); 78 } 79 #undef __FUNCT__ 80 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS" 81 /*@ 82 PCBDDCSetPrimalVerticesLocalIS - Set user defined primal vertices in PCBDDC. 
83 84 Not collective 85 86 Input Parameters: 87 + pc - the preconditioning context 88 - PrimalVertices - index sets of primal vertices in local numbering 89 90 Level: intermediate 91 92 Notes: 93 94 .seealso: PCBDDC 95 @*/ 96 PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices) 97 { 98 PetscErrorCode ierr; 99 100 PetscFunctionBegin; 101 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 102 PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2); 103 ierr = PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));CHKERRQ(ierr); 104 PetscFunctionReturn(0); 105 } 106 /* -------------------------------------------------------------------------- */ 107 #undef __FUNCT__ 108 #define __FUNCT__ "PCBDDCSetCoarseProblemType_BDDC" 109 static PetscErrorCode PCBDDCSetCoarseProblemType_BDDC(PC pc, CoarseProblemType CPT) 110 { 111 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 112 113 PetscFunctionBegin; 114 pcbddc->coarse_problem_type = CPT; 115 PetscFunctionReturn(0); 116 } 117 118 #undef __FUNCT__ 119 #define __FUNCT__ "PCBDDCSetCoarseProblemType" 120 /*@ 121 PCBDDCSetCoarseProblemType - Set coarse problem type in PCBDDC. 122 123 Not collective 124 125 Input Parameters: 126 + pc - the preconditioning context 127 - CoarseProblemType - pick a better name and explain what this is 128 129 Level: intermediate 130 131 Notes: 132 Not collective but all procs must call with same arguments. 
133 134 .seealso: PCBDDC 135 @*/ 136 PetscErrorCode PCBDDCSetCoarseProblemType(PC pc, CoarseProblemType CPT) 137 { 138 PetscErrorCode ierr; 139 140 PetscFunctionBegin; 141 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 142 ierr = PetscTryMethod(pc,"PCBDDCSetCoarseProblemType_C",(PC,CoarseProblemType),(pc,CPT));CHKERRQ(ierr); 143 PetscFunctionReturn(0); 144 } 145 /* -------------------------------------------------------------------------- */ 146 #undef __FUNCT__ 147 #define __FUNCT__ "PCBDDCSetCoarseningRatio_BDDC" 148 static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k) 149 { 150 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 151 152 PetscFunctionBegin; 153 pcbddc->coarsening_ratio=k; 154 PetscFunctionReturn(0); 155 } 156 157 #undef __FUNCT__ 158 #define __FUNCT__ "PCBDDCSetCoarseningRatio" 159 /*@ 160 PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel coarsening 161 162 Logically collective on PC 163 164 Input Parameters: 165 + pc - the preconditioning context 166 - k - coarsening ratio 167 168 Approximatively k subdomains at the finer level will be aggregated into a single subdomain at the coarser level. 
169 170 Level: intermediate 171 172 Notes: 173 174 .seealso: PCBDDC 175 @*/ 176 PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k) 177 { 178 PetscErrorCode ierr; 179 180 PetscFunctionBegin; 181 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 182 ierr = PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));CHKERRQ(ierr); 183 PetscFunctionReturn(0); 184 } 185 /* -------------------------------------------------------------------------- */ 186 187 #undef __FUNCT__ 188 #define __FUNCT__ "PCBDDCSetMaxLevels_BDDC" 189 static PetscErrorCode PCBDDCSetMaxLevels_BDDC(PC pc,PetscInt max_levels) 190 { 191 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 192 193 PetscFunctionBegin; 194 pcbddc->max_levels=max_levels; 195 PetscFunctionReturn(0); 196 } 197 198 #undef __FUNCT__ 199 #define __FUNCT__ "PCBDDCSetMaxLevels" 200 /*@ 201 PCBDDCSetMaxLevels - Sets the maximum number of levels within the multilevel approach. 202 203 Logically collective on PC 204 205 Input Parameters: 206 + pc - the preconditioning context 207 - max_levels - the maximum number of levels 208 209 Default value is 1, i.e. coarse problem will be solved inexactly with one application 210 of PCBDDC preconditioner if the multilevel approach is requested. 
211 212 Level: intermediate 213 214 Notes: 215 216 .seealso: PCBDDC 217 @*/ 218 PetscErrorCode PCBDDCSetMaxLevels(PC pc,PetscInt max_levels) 219 { 220 PetscErrorCode ierr; 221 222 PetscFunctionBegin; 223 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 224 ierr = PetscTryMethod(pc,"PCBDDCSetMaxLevels_C",(PC,PetscInt),(pc,max_levels));CHKERRQ(ierr); 225 PetscFunctionReturn(0); 226 } 227 /* -------------------------------------------------------------------------- */ 228 229 #undef __FUNCT__ 230 #define __FUNCT__ "PCBDDCSetNullSpace_BDDC" 231 static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace) 232 { 233 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 234 PetscErrorCode ierr; 235 236 PetscFunctionBegin; 237 ierr = PetscObjectReference((PetscObject)NullSpace);CHKERRQ(ierr); 238 ierr = MatNullSpaceDestroy(&pcbddc->NullSpace);CHKERRQ(ierr); 239 pcbddc->NullSpace=NullSpace; 240 PetscFunctionReturn(0); 241 } 242 243 #undef __FUNCT__ 244 #define __FUNCT__ "PCBDDCSetNullSpace" 245 /*@ 246 PCBDDCSetNullSpace - Set NullSpace of global operator of BDDC preconditioned mat. 247 248 Logically collective on PC and MatNullSpace 249 250 Input Parameters: 251 + pc - the preconditioning context 252 - NullSpace - Null space of the linear operator to be preconditioned. 
253 254 Level: intermediate 255 256 Notes: 257 258 .seealso: PCBDDC 259 @*/ 260 PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace) 261 { 262 PetscErrorCode ierr; 263 264 PetscFunctionBegin; 265 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 266 PetscValidHeaderSpecific(NullSpace,MAT_NULLSPACE_CLASSID,2); 267 ierr = PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));CHKERRQ(ierr); 268 PetscFunctionReturn(0); 269 } 270 /* -------------------------------------------------------------------------- */ 271 272 #undef __FUNCT__ 273 #define __FUNCT__ "PCBDDCSetDirichletBoundaries_BDDC" 274 static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries) 275 { 276 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 277 PetscErrorCode ierr; 278 279 PetscFunctionBegin; 280 ierr = ISDestroy(&pcbddc->DirichletBoundaries);CHKERRQ(ierr); 281 ierr = PetscObjectReference((PetscObject)DirichletBoundaries);CHKERRQ(ierr); 282 pcbddc->DirichletBoundaries=DirichletBoundaries; 283 PetscFunctionReturn(0); 284 } 285 286 #undef __FUNCT__ 287 #define __FUNCT__ "PCBDDCSetDirichletBoundaries" 288 /*@ 289 PCBDDCSetDirichletBoundaries - Set index set defining subdomain part (in local ordering) 290 of Dirichlet boundaries for the global problem. 
291 292 Not collective 293 294 Input Parameters: 295 + pc - the preconditioning context 296 - DirichletBoundaries - sequential index set defining the subdomain part of Dirichlet boundaries (can be NULL) 297 298 Level: intermediate 299 300 Notes: 301 302 .seealso: PCBDDC 303 @*/ 304 PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries) 305 { 306 PetscErrorCode ierr; 307 308 PetscFunctionBegin; 309 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 310 PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2); 311 ierr = PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));CHKERRQ(ierr); 312 PetscFunctionReturn(0); 313 } 314 /* -------------------------------------------------------------------------- */ 315 316 #undef __FUNCT__ 317 #define __FUNCT__ "PCBDDCSetNeumannBoundaries_BDDC" 318 static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries) 319 { 320 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 321 PetscErrorCode ierr; 322 323 PetscFunctionBegin; 324 ierr = ISDestroy(&pcbddc->NeumannBoundaries);CHKERRQ(ierr); 325 ierr = PetscObjectReference((PetscObject)NeumannBoundaries);CHKERRQ(ierr); 326 pcbddc->NeumannBoundaries=NeumannBoundaries; 327 PetscFunctionReturn(0); 328 } 329 330 #undef __FUNCT__ 331 #define __FUNCT__ "PCBDDCSetNeumannBoundaries" 332 /*@ 333 PCBDDCSetNeumannBoundaries - Set index set defining subdomain part (in local ordering) 334 of Neumann boundaries for the global problem. 
335 336 Not collective 337 338 Input Parameters: 339 + pc - the preconditioning context 340 - NeumannBoundaries - sequential index set defining the subdomain part of Neumann boundaries (can be NULL) 341 342 Level: intermediate 343 344 Notes: 345 346 .seealso: PCBDDC 347 @*/ 348 PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries) 349 { 350 PetscErrorCode ierr; 351 352 PetscFunctionBegin; 353 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 354 PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2); 355 ierr = PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));CHKERRQ(ierr); 356 PetscFunctionReturn(0); 357 } 358 /* -------------------------------------------------------------------------- */ 359 360 #undef __FUNCT__ 361 #define __FUNCT__ "PCBDDCGetDirichletBoundaries_BDDC" 362 static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries) 363 { 364 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 365 366 PetscFunctionBegin; 367 *DirichletBoundaries = pcbddc->DirichletBoundaries; 368 PetscFunctionReturn(0); 369 } 370 371 #undef __FUNCT__ 372 #define __FUNCT__ "PCBDDCGetDirichletBoundaries" 373 /*@ 374 PCBDDCGetDirichletBoundaries - Get index set defining subdomain part (in local ordering) 375 of Dirichlet boundaries for the global problem. 
376 377 Not collective 378 379 Input Parameters: 380 + pc - the preconditioning context 381 382 Output Parameters: 383 + DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries 384 385 Level: intermediate 386 387 Notes: 388 389 .seealso: PCBDDC 390 @*/ 391 PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries) 392 { 393 PetscErrorCode ierr; 394 395 PetscFunctionBegin; 396 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 397 ierr = PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));CHKERRQ(ierr); 398 PetscFunctionReturn(0); 399 } 400 /* -------------------------------------------------------------------------- */ 401 402 #undef __FUNCT__ 403 #define __FUNCT__ "PCBDDCGetNeumannBoundaries_BDDC" 404 static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries) 405 { 406 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 407 408 PetscFunctionBegin; 409 *NeumannBoundaries = pcbddc->NeumannBoundaries; 410 PetscFunctionReturn(0); 411 } 412 413 #undef __FUNCT__ 414 #define __FUNCT__ "PCBDDCGetNeumannBoundaries" 415 /*@ 416 PCBDDCGetNeumannBoundaries - Get index set defining subdomain part (in local ordering) 417 of Neumann boundaries for the global problem. 
418 419 Not collective 420 421 Input Parameters: 422 + pc - the preconditioning context 423 424 Output Parameters: 425 + NeumannBoundaries - index set defining the subdomain part of Neumann boundaries 426 427 Level: intermediate 428 429 Notes: 430 431 .seealso: PCBDDC 432 @*/ 433 PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries) 434 { 435 PetscErrorCode ierr; 436 437 PetscFunctionBegin; 438 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 439 ierr = PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));CHKERRQ(ierr); 440 PetscFunctionReturn(0); 441 } 442 /* -------------------------------------------------------------------------- */ 443 444 #undef __FUNCT__ 445 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph_BDDC" 446 static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode) 447 { 448 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 449 PCBDDCGraph mat_graph = pcbddc->mat_graph; 450 PetscErrorCode ierr; 451 452 PetscFunctionBegin; 453 /* free old CSR */ 454 ierr = PCBDDCGraphResetCSR(mat_graph);CHKERRQ(ierr); 455 /* get CSR into graph structure */ 456 if (copymode == PETSC_COPY_VALUES) { 457 ierr = PetscMalloc((nvtxs+1)*sizeof(PetscInt),&mat_graph->xadj);CHKERRQ(ierr); 458 ierr = PetscMalloc(xadj[nvtxs]*sizeof(PetscInt),&mat_graph->adjncy);CHKERRQ(ierr); 459 ierr = PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));CHKERRQ(ierr); 460 ierr = PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));CHKERRQ(ierr); 461 } else if (copymode == PETSC_OWN_POINTER) { 462 mat_graph->xadj = (PetscInt*)xadj; 463 mat_graph->adjncy = (PetscInt*)adjncy; 464 } 465 mat_graph->nvtxs_csr = nvtxs; 466 PetscFunctionReturn(0); 467 } 468 469 #undef __FUNCT__ 470 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph" 471 /*@ 472 PCBDDCSetLocalAdjacencyGraph - Set CSR graph of local matrix for use of PCBDDC. 
473 474 Not collective 475 476 Input Parameters: 477 + pc - the preconditioning context 478 - nvtxs - number of local vertices of the graph 479 - xadj, adjncy - the CSR graph 480 - copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER. In the former case the user must free the array passed in; 481 in the latter case, memory must be obtained with PetscMalloc. 482 483 Level: intermediate 484 485 Notes: 486 487 .seealso: PCBDDC 488 @*/ 489 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode) 490 { 491 void (*f)(void) = 0; 492 PetscErrorCode ierr; 493 494 PetscFunctionBegin; 495 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 496 PetscValidIntPointer(xadj,3); 497 PetscValidIntPointer(xadj,4); 498 if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER) { 499 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d in %s\n",copymode,__FUNCT__); 500 } 501 ierr = PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));CHKERRQ(ierr); 502 /* free arrays if PCBDDC is not the PC type */ 503 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f);CHKERRQ(ierr); 504 if (!f && copymode == PETSC_OWN_POINTER) { 505 ierr = PetscFree(xadj);CHKERRQ(ierr); 506 ierr = PetscFree(adjncy);CHKERRQ(ierr); 507 } 508 PetscFunctionReturn(0); 509 } 510 /* -------------------------------------------------------------------------- */ 511 512 #undef __FUNCT__ 513 #define __FUNCT__ "PCBDDCSetDofsSplitting_BDDC" 514 static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[]) 515 { 516 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 517 PetscInt i; 518 PetscErrorCode ierr; 519 520 PetscFunctionBegin; 521 /* Destroy ISes if they were already set */ 522 for (i=0;i<pcbddc->n_ISForDofs;i++) { 523 ierr = ISDestroy(&pcbddc->ISForDofs[i]);CHKERRQ(ierr); 524 } 525 ierr = 
PetscFree(pcbddc->ISForDofs);CHKERRQ(ierr); 526 /* allocate space then set */ 527 ierr = PetscMalloc(n_is*sizeof(IS),&pcbddc->ISForDofs);CHKERRQ(ierr); 528 for (i=0;i<n_is;i++) { 529 ierr = PetscObjectReference((PetscObject)ISForDofs[i]);CHKERRQ(ierr); 530 pcbddc->ISForDofs[i]=ISForDofs[i]; 531 } 532 pcbddc->n_ISForDofs=n_is; 533 PetscFunctionReturn(0); 534 } 535 536 #undef __FUNCT__ 537 #define __FUNCT__ "PCBDDCSetDofsSplitting" 538 /*@ 539 PCBDDCSetDofsSplitting - Set index sets defining fields of local mat. 540 541 Not collective 542 543 Input Parameters: 544 + pc - the preconditioning context 545 - n - number of index sets defining the fields 546 - IS[] - array of IS describing the fields 547 548 Level: intermediate 549 550 Notes: 551 552 .seealso: PCBDDC 553 @*/ 554 PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[]) 555 { 556 PetscErrorCode ierr; 557 558 PetscFunctionBegin; 559 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 560 ierr = PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));CHKERRQ(ierr); 561 PetscFunctionReturn(0); 562 } 563 /* -------------------------------------------------------------------------- */ 564 #undef __FUNCT__ 565 #define __FUNCT__ "PCPreSolve_BDDC" 566 /* -------------------------------------------------------------------------- */ 567 /* 568 PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial 569 guess if a transformation of basis approach has been selected. 570 571 Input Parameter: 572 + pc - the preconditioner contex 573 574 Application Interface Routine: PCPreSolve() 575 576 Notes: 577 The interface routine PCPreSolve() is not usually called directly by 578 the user, but instead is called by KSPSolve(). 
 */
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PetscErrorCode ierr;
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PC_IS          *pcis   = (PC_IS*)(pc->data);
  Mat_IS         *matis  = (Mat_IS*)pc->pmat->data;
  Mat            temp_mat;
  IS             dirIS;
  PetscInt       dirsize,i,*is_indices;
  PetscScalar    *array_x,*array_diagonal;
  Vec            used_vec;
  PetscBool      guess_nonzero;

  PetscFunctionBegin;
  /* work on the caller's solution vector when available, otherwise on the
     internally stored temp_solution (zeroed out below) */
  if (x) {
    ierr = PetscObjectReference((PetscObject)x);CHKERRQ(ierr);
    used_vec = x;
  } else {
    ierr = PetscObjectReference((PetscObject)pcbddc->temp_solution);CHKERRQ(ierr);
    used_vec = pcbddc->temp_solution;
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
  }
  /* hack into ksp data structure PCPreSolve comes earlier in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    ierr = KSPGetInitialGuessNonzero(ksp,&guess_nonzero);CHKERRQ(ierr);
    if ( !guess_nonzero ) {
      ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    }
  }
  /* store the original rhs (restored later by PCPostSolve_BDDC) */
  ierr = VecCopy(rhs,pcbddc->original_rhs);CHKERRQ(ierr);

  /* Take into account zeroed rows -> change rhs and store solution removed */
  /* vec2_N receives rhs_i/diag_i in local (subdomain) numbering, vec1_N the current solution */
  ierr = MatGetDiagonal(pc->pmat,pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterBegin(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = PCBDDCGetDirichletBoundaries(pc,&dirIS);CHKERRQ(ierr);
  if (dirIS) {
    /* NOTE(review): ISGetSize on a sequential IS equals its local size — confirm
       dirIS is always sequential (the setter documents it as such) */
    ierr = ISGetSize(dirIS,&dirsize);CHKERRQ(ierr);
    ierr = VecGetArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
    ierr = VecGetArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
    ierr = ISGetIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
    /* fix the solution at Dirichlet dofs to rhs_i/diag_i */
    for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
    ierr = ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
    ierr = VecRestoreArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
    ierr = VecRestoreArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

  /* remove the computed solution from the rhs: rhs <- rhs - A*used_vec,
     implemented by negating used_vec, MatMultAdd, and negating back */
  ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
  ierr = MatMultAdd(pc->pmat,used_vec,rhs,rhs);CHKERRQ(ierr);
  ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);

  /* store partially computed solution and set initial guess */
  if (x) {
    ierr = VecCopy(used_vec,pcbddc->temp_solution);CHKERRQ(ierr);
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    if (pcbddc->use_exact_dirichlet) {
      /* exact interior (Dirichlet) solve used as initial guess */
      ierr = VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
      ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      if (ksp) {
        ierr = KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }

  /* rhs change of basis */
  if (pcbddc->use_change_of_basis) {
    /* swap pointers for local matrices (undone in PCPostSolve_BDDC) */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    pcbddc->local_mat = temp_mat;
    /* Get local rhs and apply transformation of basis */
    ierr = VecScatterBegin(pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* from original basis to modified basis */
    ierr = MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
    /* put back modified values into the global vec using INSERT_VALUES copy mode */
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  /* project rhs and guess out of the (user supplied) null space of the operator */
  if (ksp && pcbddc->NullSpace) {
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,used_vec,NULL);CHKERRQ(ierr);
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,rhs,NULL);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&used_vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCPostSolve_BDDC"
/* -------------------------------------------------------------------------- */
/*
   PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
                      approach has been selected. Also, restores rhs to its original state.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCPostSolve()

   Notes:
   The interface routine PCPostSolve() is not usually called directly by
   the user, but instead is called by KSPSolve().
692 */ 693 static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x) 694 { 695 PetscErrorCode ierr; 696 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 697 PC_IS *pcis = (PC_IS*)(pc->data); 698 Mat_IS *matis = (Mat_IS*)pc->pmat->data; 699 Mat temp_mat; 700 701 PetscFunctionBegin; 702 if (pcbddc->use_change_of_basis) { 703 /* swap pointers for local matrices */ 704 temp_mat = matis->A; 705 matis->A = pcbddc->local_mat; 706 pcbddc->local_mat = temp_mat; 707 /* restore rhs to its original state */ 708 if (rhs) { 709 ierr = VecCopy(pcbddc->original_rhs,rhs);CHKERRQ(ierr); 710 } 711 /* Get Local boundary and apply transformation of basis to solution vector */ 712 ierr = VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 713 ierr = VecScatterEnd (pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 714 /* from modified basis to original basis */ 715 ierr = MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr); 716 /* put back modified values into the global vec using INSERT_VALUES copy mode */ 717 ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 718 ierr = VecScatterEnd (pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 719 } 720 /* add solution removed in presolve */ 721 if (x) { 722 ierr = VecAXPY(x,1.0,pcbddc->temp_solution);CHKERRQ(ierr); 723 } 724 PetscFunctionReturn(0); 725 } 726 /* -------------------------------------------------------------------------- */ 727 #undef __FUNCT__ 728 #define __FUNCT__ "PCSetUp_BDDC" 729 /* -------------------------------------------------------------------------- */ 730 /* 731 PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner 732 by setting data structures and options. 

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
PetscErrorCode PCSetUp_BDDC(PC pc)
{
  PetscErrorCode ierr;
  PC_BDDC*       pcbddc = (PC_BDDC*)pc->data;
  MatStructure   flag;
  PetscBool      computeis,computetopography,computesolvers;

  PetscFunctionBegin;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
     So, we set to pcnone the Neumann problem of pcis in order to avoid unneeded computation
     Also, we decide to directly build the (same) Dirichlet problem
     NOTE(review): this mutates the global options database as a side effect */
  ierr = PetscOptionsSetValue("-is_localN_pc_type","none");CHKERRQ(ierr);
  ierr = PetscOptionsSetValue("-is_localD_pc_type","none");CHKERRQ(ierr);
  /* Get stdout for dbg */
  if (pcbddc->dbg_flag && !pcbddc->dbg_viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&pcbddc->dbg_viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(pcbddc->dbg_viewer,PETSC_TRUE);CHKERRQ(ierr);
  }
  /* first attempt to split work: on repeated setups, recompute only what the
     operators' MatStructure flag says may have changed */
  if (pc->setupcalled) {
    computeis = PETSC_FALSE;
    ierr = PCGetOperators(pc,NULL,NULL,&flag);CHKERRQ(ierr);
    if (flag == SAME_PRECONDITIONER) {
      /* nothing changed: keep topography and solvers */
      computetopography = PETSC_FALSE;
      computesolvers = PETSC_FALSE;
    } else if (flag == SAME_NONZERO_PATTERN) {
      /* values changed but not the pattern: keep topography, rebuild solvers */
      computetopography = PETSC_FALSE;
      computesolvers = PETSC_TRUE;
    } else { /* DIFFERENT_NONZERO_PATTERN */
      computetopography = PETSC_TRUE;
      computesolvers = PETSC_TRUE;
    }
  } else {
    /* first setup: everything must be built */
    computeis = PETSC_TRUE;
    computetopography = PETSC_TRUE;
    computesolvers = PETSC_TRUE;
  }
  /* Set up all the "iterative substructuring" common block */
  if (computeis) {
    ierr = PCISSetUp(pc);CHKERRQ(ierr);
  }
  /* Analyze interface and set up local constraint and change of basis matrices */
  if (computetopography) {
    /* reset data */
    ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
    ierr = PCBDDCAnalyzeInterface(pc);CHKERRQ(ierr);
    ierr = PCBDDCConstraintsSetUp(pc);CHKERRQ(ierr);
  }
  if (computesolvers) {
    /* reset data */
    ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
    ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
    /* Create coarse and local stuffs used for evaluating action of preconditioner */
    ierr = PCBDDCCoarseSetUp(pc);CHKERRQ(ierr);
    ierr = PCBDDCScalingSetUp(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCApply_BDDC - Applies the BDDC preconditioner to a vector.

   Input Parameters:
.  pc - the preconditioner context
.  r - input vector (global)

   Output Parameter:
.
z - output vector (global) 813 814 Application Interface Routine: PCApply() 815 */ 816 #undef __FUNCT__ 817 #define __FUNCT__ "PCApply_BDDC" 818 PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z) 819 { 820 PC_IS *pcis = (PC_IS*)(pc->data); 821 PC_BDDC *pcbddc = (PC_BDDC*)(pc->data); 822 PetscErrorCode ierr; 823 const PetscScalar one = 1.0; 824 const PetscScalar m_one = -1.0; 825 const PetscScalar zero = 0.0; 826 827 /* This code is similar to that provided in nn.c for PCNN 828 NN interface preconditioner changed to BDDC 829 Added support for M_3 preconditioner in the reference article (code is active if pcbddc->inexact_prec_type = PETSC_TRUE) */ 830 831 PetscFunctionBegin; 832 if (!pcbddc->use_exact_dirichlet) { 833 /* First Dirichlet solve */ 834 ierr = VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 835 ierr = VecScatterEnd (pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 836 ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr); 837 /* 838 Assembling right hand side for BDDC operator 839 - pcis->vec1_D for the Dirichlet part (if needed, i.e. 
prec_flag=PETSC_TRUE) 840 - pcis->vec1_B the interface part of the global vector z 841 */ 842 ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr); 843 ierr = MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);CHKERRQ(ierr); 844 if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D);CHKERRQ(ierr); } 845 ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr); 846 ierr = VecCopy(r,z);CHKERRQ(ierr); 847 ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 848 ierr = VecScatterEnd (pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 849 ierr = PCBDDCScalingRestriction(pc,z,pcis->vec1_B);CHKERRQ(ierr); 850 } else { 851 ierr = VecSet(pcis->vec1_D,zero);CHKERRQ(ierr); 852 ierr = VecSet(pcis->vec2_D,zero);CHKERRQ(ierr); 853 ierr = PCBDDCScalingRestriction(pc,r,pcis->vec1_B);CHKERRQ(ierr); 854 } 855 856 /* Apply interface preconditioner 857 input/output vecs: pcis->vec1_B and pcis->vec1_D */ 858 ierr = PCBDDCApplyInterfacePreconditioner(pc);CHKERRQ(ierr); 859 860 /* Apply transpose of partition of unity operator */ 861 ierr = PCBDDCScalingExtension(pc,pcis->vec1_B,z);CHKERRQ(ierr); 862 863 /* Second Dirichlet solve and assembling of output */ 864 ierr = VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 865 ierr = VecScatterEnd (pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 866 ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);CHKERRQ(ierr); 867 if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D);CHKERRQ(ierr); } 868 ierr = KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcbddc->vec4_D);CHKERRQ(ierr); 869 ierr = VecScale(pcbddc->vec4_D,m_one);CHKERRQ(ierr); 870 if (pcbddc->inexact_prec_type) { ierr = VecAXPY (pcbddc->vec4_D,one,pcis->vec1_D);CHKERRQ(ierr); } 871 ierr = VecAXPY (pcis->vec2_D,one,pcbddc->vec4_D);CHKERRQ(ierr); 872 ierr = 
VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 873 ierr = VecScatterEnd (pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 874 PetscFunctionReturn(0); 875 } 876 /* -------------------------------------------------------------------------- */ 877 878 #undef __FUNCT__ 879 #define __FUNCT__ "PCDestroy_BDDC" 880 PetscErrorCode PCDestroy_BDDC(PC pc) 881 { 882 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 883 PetscErrorCode ierr; 884 885 PetscFunctionBegin; 886 /* free data created by PCIS */ 887 ierr = PCISDestroy(pc);CHKERRQ(ierr); 888 /* free BDDC custom data */ 889 ierr = PCBDDCResetCustomization(pc);CHKERRQ(ierr); 890 /* destroy objects related to topography */ 891 ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr); 892 /* free allocated graph structure */ 893 ierr = PetscFree(pcbddc->mat_graph);CHKERRQ(ierr); 894 /* free data for scaling operator */ 895 ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr); 896 /* free solvers stuff */ 897 ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr); 898 /* remove functions */ 899 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);CHKERRQ(ierr); 900 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);CHKERRQ(ierr); 901 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",NULL);CHKERRQ(ierr); 902 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);CHKERRQ(ierr); 903 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);CHKERRQ(ierr); 904 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);CHKERRQ(ierr); 905 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);CHKERRQ(ierr); 906 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);CHKERRQ(ierr); 907 ierr = 
PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",NULL);CHKERRQ(ierr); 908 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);CHKERRQ(ierr); 909 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);CHKERRQ(ierr); 910 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);CHKERRQ(ierr); 911 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);CHKERRQ(ierr); 912 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);CHKERRQ(ierr); 913 /* Free the private data structure */ 914 ierr = PetscFree(pc->data);CHKERRQ(ierr); 915 PetscFunctionReturn(0); 916 } 917 /* -------------------------------------------------------------------------- */ 918 919 #undef __FUNCT__ 920 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS_BDDC" 921 static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs) 922 { 923 FETIDPMat_ctx mat_ctx; 924 PC_IS* pcis; 925 PC_BDDC* pcbddc; 926 PetscErrorCode ierr; 927 928 PetscFunctionBegin; 929 ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr); 930 pcis = (PC_IS*)mat_ctx->pc->data; 931 pcbddc = (PC_BDDC*)mat_ctx->pc->data; 932 933 /* change of basis for physical rhs if needed 934 It also changes the rhs in case of dirichlet boundaries */ 935 (*mat_ctx->pc->ops->presolve)(mat_ctx->pc,NULL,standard_rhs,NULL); 936 /* store vectors for computation of fetidp final solution */ 937 ierr = VecScatterBegin(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 938 ierr = VecScatterEnd(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 939 /* scale rhs since it should be unassembled : TODO use counter scaling? 
(also below) */ 940 ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 941 ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 942 /* Apply partition of unity */ 943 ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr); 944 /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */ 945 if (!pcbddc->inexact_prec_type) { 946 /* compute partially subassembled Schur complement right-hand side */ 947 ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr); 948 ierr = MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);CHKERRQ(ierr); 949 ierr = VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);CHKERRQ(ierr); 950 ierr = VecSet(standard_rhs,0.0);CHKERRQ(ierr); 951 ierr = VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 952 ierr = VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 953 /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */ 954 ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 955 ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 956 ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr); 957 } 958 /* BDDC rhs */ 959 ierr = VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);CHKERRQ(ierr); 960 if (pcbddc->inexact_prec_type) { 961 ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr); 962 } 963 /* apply BDDC */ 964 ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr); 965 /* Application of B_delta and assembling of rhs for 
fetidp fluxes */ 966 ierr = VecSet(fetidp_flux_rhs,0.0);CHKERRQ(ierr); 967 ierr = MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);CHKERRQ(ierr); 968 ierr = VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 969 ierr = VecScatterEnd (mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 970 /* restore original rhs */ 971 ierr = VecCopy(pcbddc->original_rhs,standard_rhs);CHKERRQ(ierr); 972 PetscFunctionReturn(0); 973 } 974 975 #undef __FUNCT__ 976 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS" 977 /*@ 978 PCBDDCMatFETIDPGetRHS - Get rhs for FETIDP linear system. 979 980 Collective 981 982 Input Parameters: 983 + fetidp_mat - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators 984 + standard_rhs - the rhs of your linear system 985 986 Output Parameters: 987 + fetidp_flux_rhs - the rhs of the FETIDP linear system 988 989 Level: developer 990 991 Notes: 992 993 .seealso: PCBDDC 994 @*/ 995 PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs) 996 { 997 FETIDPMat_ctx mat_ctx; 998 PetscErrorCode ierr; 999 1000 PetscFunctionBegin; 1001 ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr); 1002 ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));CHKERRQ(ierr); 1003 PetscFunctionReturn(0); 1004 } 1005 /* -------------------------------------------------------------------------- */ 1006 1007 #undef __FUNCT__ 1008 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution_BDDC" 1009 static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol) 1010 { 1011 FETIDPMat_ctx mat_ctx; 1012 PC_IS* pcis; 1013 PC_BDDC* pcbddc; 1014 PetscErrorCode ierr; 1015 1016 PetscFunctionBegin; 1017 ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr); 1018 pcis = (PC_IS*)mat_ctx->pc->data; 1019 pcbddc = 
(PC_BDDC*)mat_ctx->pc->data; 1020 1021 /* apply B_delta^T */ 1022 ierr = VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1023 ierr = VecScatterEnd (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1024 ierr = MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);CHKERRQ(ierr); 1025 /* compute rhs for BDDC application */ 1026 ierr = VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);CHKERRQ(ierr); 1027 if (pcbddc->inexact_prec_type) { 1028 ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr); 1029 } 1030 /* apply BDDC */ 1031 ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr); 1032 /* put values into standard global vector */ 1033 ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1034 ierr = VecScatterEnd (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1035 if (!pcbddc->inexact_prec_type) { 1036 /* compute values into the interior if solved for the partially subassembled Schur complement */ 1037 ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);CHKERRQ(ierr); 1038 ierr = VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);CHKERRQ(ierr); 1039 ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr); 1040 } 1041 ierr = VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1042 ierr = VecScatterEnd (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1043 /* final change of basis if needed 1044 Is also sums the dirichlet part removed during RHS assembling */ 1045 (*mat_ctx->pc->ops->postsolve)(mat_ctx->pc,NULL,NULL,standard_sol); 1046 PetscFunctionReturn(0); 1047 1048 } 1049 1050 #undef __FUNCT__ 1051 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution" 1052 /*@ 1053 
PCBDDCMatFETIDPGetSolution - Get Solution for FETIDP linear system. 1054 1055 Collective 1056 1057 Input Parameters: 1058 + fetidp_mat - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators 1059 + fetidp_flux_sol - the solution of the FETIDP linear system 1060 1061 Output Parameters: 1062 + standard_sol - the solution on the global domain 1063 1064 Level: developer 1065 1066 Notes: 1067 1068 .seealso: PCBDDC 1069 @*/ 1070 PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol) 1071 { 1072 FETIDPMat_ctx mat_ctx; 1073 PetscErrorCode ierr; 1074 1075 PetscFunctionBegin; 1076 ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr); 1077 ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));CHKERRQ(ierr); 1078 PetscFunctionReturn(0); 1079 } 1080 /* -------------------------------------------------------------------------- */ 1081 1082 extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec); 1083 extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat); 1084 extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec); 1085 extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC); 1086 1087 #undef __FUNCT__ 1088 #define __FUNCT__ "PCBDDCCreateFETIDPOperators_BDDC" 1089 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc) 1090 { 1091 1092 FETIDPMat_ctx fetidpmat_ctx; 1093 Mat newmat; 1094 FETIDPPC_ctx fetidppc_ctx; 1095 PC newpc; 1096 MPI_Comm comm; 1097 PetscErrorCode ierr; 1098 1099 PetscFunctionBegin; 1100 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1101 /* FETIDP linear matrix */ 1102 ierr = PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);CHKERRQ(ierr); 1103 ierr = PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);CHKERRQ(ierr); 1104 ierr = MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);CHKERRQ(ierr); 1105 ierr = MatShellSetOperation(newmat,MATOP_MULT,(void 
(*)(void))FETIDPMatMult);CHKERRQ(ierr); 1106 ierr = MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);CHKERRQ(ierr); 1107 ierr = MatSetUp(newmat);CHKERRQ(ierr); 1108 /* FETIDP preconditioner */ 1109 ierr = PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);CHKERRQ(ierr); 1110 ierr = PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);CHKERRQ(ierr); 1111 ierr = PCCreate(comm,&newpc);CHKERRQ(ierr); 1112 ierr = PCSetType(newpc,PCSHELL);CHKERRQ(ierr); 1113 ierr = PCShellSetContext(newpc,fetidppc_ctx);CHKERRQ(ierr); 1114 ierr = PCShellSetApply(newpc,FETIDPPCApply);CHKERRQ(ierr); 1115 ierr = PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);CHKERRQ(ierr); 1116 ierr = PCSetOperators(newpc,newmat,newmat,SAME_PRECONDITIONER);CHKERRQ(ierr); 1117 ierr = PCSetUp(newpc);CHKERRQ(ierr); 1118 /* return pointers for objects created */ 1119 *fetidp_mat=newmat; 1120 *fetidp_pc=newpc; 1121 PetscFunctionReturn(0); 1122 } 1123 1124 #undef __FUNCT__ 1125 #define __FUNCT__ "PCBDDCCreateFETIDPOperators" 1126 /*@ 1127 PCBDDCCreateFETIDPOperators - Create operators for FETIDP. 1128 1129 Collective 1130 1131 Input Parameters: 1132 + pc - the BDDC preconditioning context (setup must be already called) 1133 1134 Level: developer 1135 1136 Notes: 1137 1138 .seealso: PCBDDC 1139 @*/ 1140 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc) 1141 { 1142 PetscErrorCode ierr; 1143 1144 PetscFunctionBegin; 1145 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1146 if (pc->setupcalled) { 1147 ierr = PetscTryMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));CHKERRQ(ierr); 1148 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetup_BDDC() first \n"); 1149 PetscFunctionReturn(0); 1150 } 1151 /* -------------------------------------------------------------------------- */ 1152 /*MC 1153 PCBDDC - Balancing Domain Decomposition by Constraints. 1154 1155 Options Database Keys: 1156 . -pcbddc ??? 
- 1157 1158 Level: intermediate 1159 1160 Notes: The matrix used with this preconditioner must be of type MATIS 1161 1162 Unlike more 'conventional' interface preconditioners, this iterates over ALL the 1163 degrees of freedom, NOT just those on the interface (this allows the use of approximate solvers 1164 on the subdomains). 1165 1166 Options for the coarse grid preconditioner can be set with - 1167 Options for the Dirichlet subproblem can be set with - 1168 Options for the Neumann subproblem can be set with - 1169 1170 Contributed by Stefano Zampini 1171 1172 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, MATIS 1173 M*/ 1174 1175 #undef __FUNCT__ 1176 #define __FUNCT__ "PCCreate_BDDC" 1177 PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc) 1178 { 1179 PetscErrorCode ierr; 1180 PC_BDDC *pcbddc; 1181 1182 PetscFunctionBegin; 1183 /* Creates the private data structure for this preconditioner and attach it to the PC object. */ 1184 ierr = PetscNewLog(pc,PC_BDDC,&pcbddc);CHKERRQ(ierr); 1185 pc->data = (void*)pcbddc; 1186 1187 /* create PCIS data structure */ 1188 ierr = PCISCreate(pc);CHKERRQ(ierr); 1189 1190 /* BDDC specific */ 1191 pcbddc->user_primal_vertices = 0; 1192 pcbddc->NullSpace = 0; 1193 pcbddc->temp_solution = 0; 1194 pcbddc->original_rhs = 0; 1195 pcbddc->local_mat = 0; 1196 pcbddc->ChangeOfBasisMatrix = 0; 1197 pcbddc->use_change_of_basis = PETSC_TRUE; 1198 pcbddc->use_change_on_faces = PETSC_FALSE; 1199 pcbddc->coarse_vec = 0; 1200 pcbddc->coarse_rhs = 0; 1201 pcbddc->coarse_ksp = 0; 1202 pcbddc->coarse_phi_B = 0; 1203 pcbddc->coarse_phi_D = 0; 1204 pcbddc->vec1_P = 0; 1205 pcbddc->vec1_R = 0; 1206 pcbddc->vec2_R = 0; 1207 pcbddc->local_auxmat1 = 0; 1208 pcbddc->local_auxmat2 = 0; 1209 pcbddc->R_to_B = 0; 1210 pcbddc->R_to_D = 0; 1211 pcbddc->ksp_D = 0; 1212 pcbddc->ksp_R = 0; 1213 pcbddc->local_primal_indices = 0; 1214 pcbddc->inexact_prec_type = PETSC_FALSE; 1215 pcbddc->NeumannBoundaries = 0; 1216 pcbddc->ISForDofs = 0; 
1217 pcbddc->ConstraintMatrix = 0; 1218 pcbddc->use_nnsp_true = PETSC_FALSE; 1219 pcbddc->local_primal_sizes = 0; 1220 pcbddc->local_primal_displacements = 0; 1221 pcbddc->coarse_loc_to_glob = 0; 1222 pcbddc->dbg_flag = 0; 1223 pcbddc->coarsening_ratio = 8; 1224 pcbddc->use_exact_dirichlet = PETSC_TRUE; 1225 pcbddc->current_level = 0; 1226 pcbddc->max_levels = 1; 1227 pcbddc->replicated_local_primal_indices = 0; 1228 pcbddc->replicated_local_primal_values = 0; 1229 1230 /* create local graph structure */ 1231 ierr = PCBDDCGraphCreate(&pcbddc->mat_graph);CHKERRQ(ierr); 1232 1233 /* scaling */ 1234 pcbddc->use_deluxe_scaling = PETSC_FALSE; 1235 pcbddc->work_scaling = 0; 1236 1237 /* function pointers */ 1238 pc->ops->apply = PCApply_BDDC; 1239 pc->ops->applytranspose = 0; 1240 pc->ops->setup = PCSetUp_BDDC; 1241 pc->ops->destroy = PCDestroy_BDDC; 1242 pc->ops->setfromoptions = PCSetFromOptions_BDDC; 1243 pc->ops->view = 0; 1244 pc->ops->applyrichardson = 0; 1245 pc->ops->applysymmetricleft = 0; 1246 pc->ops->applysymmetricright = 0; 1247 pc->ops->presolve = PCPreSolve_BDDC; 1248 pc->ops->postsolve = PCPostSolve_BDDC; 1249 1250 /* composing function */ 1251 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);CHKERRQ(ierr); 1252 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);CHKERRQ(ierr); 1253 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",PCBDDCSetMaxLevels_BDDC);CHKERRQ(ierr); 1254 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);CHKERRQ(ierr); 1255 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);CHKERRQ(ierr); 1256 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);CHKERRQ(ierr); 1257 ierr = 
PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);CHKERRQ(ierr); 1258 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);CHKERRQ(ierr); 1259 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",PCBDDCSetCoarseProblemType_BDDC);CHKERRQ(ierr); 1260 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);CHKERRQ(ierr); 1261 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);CHKERRQ(ierr); 1262 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);CHKERRQ(ierr); 1263 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);CHKERRQ(ierr); 1264 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);CHKERRQ(ierr); 1265 PetscFunctionReturn(0); 1266 } 1267 1268 /* -------------------------------------------------------------------------- */ 1269 /* All static functions from now on */ 1270 /* -------------------------------------------------------------------------- */ 1271 1272 #undef __FUNCT__ 1273 #define __FUNCT__ "PCBDDCSetUseExactDirichlet" 1274 static PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc,PetscBool use) 1275 { 1276 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 1277 1278 PetscFunctionBegin; 1279 pcbddc->use_exact_dirichlet=use; 1280 PetscFunctionReturn(0); 1281 } 1282 1283 #undef __FUNCT__ 1284 #define __FUNCT__ "PCBDDCSetLevel" 1285 static PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level) 1286 { 1287 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 1288 1289 PetscFunctionBegin; 1290 pcbddc->current_level=level; 1291 PetscFunctionReturn(0); 1292 } 1293 1294 /* -------------------------------------------------------------------------- */ 1295 
#undef __FUNCT__ 1296 #define __FUNCT__ "PCBDDCCoarseSetUp" 1297 static PetscErrorCode PCBDDCCoarseSetUp(PC pc) 1298 { 1299 PetscErrorCode ierr; 1300 1301 PC_IS* pcis = (PC_IS*)(pc->data); 1302 PC_BDDC* pcbddc = (PC_BDDC*)pc->data; 1303 Mat_IS *matis = (Mat_IS*)pc->pmat->data; 1304 IS is_R_local; 1305 VecType impVecType; 1306 MatType impMatType; 1307 PetscInt n_R=0; 1308 PetscInt n_D=0; 1309 PetscInt n_B=0; 1310 PetscScalar zero=0.0; 1311 PetscScalar one=1.0; 1312 PetscScalar m_one=-1.0; 1313 PetscScalar* array; 1314 PetscScalar *coarse_submat_vals; 1315 PetscInt *idx_R_local; 1316 PetscScalar *coarsefunctions_errors; 1317 PetscScalar *constraints_errors; 1318 /* auxiliary indices */ 1319 PetscInt i,j,k; 1320 /* for verbose output of bddc */ 1321 PetscViewer viewer=pcbddc->dbg_viewer; 1322 PetscBool dbg_flag=pcbddc->dbg_flag; 1323 /* for counting coarse dofs */ 1324 PetscInt n_vertices,n_constraints; 1325 PetscInt size_of_constraint; 1326 PetscInt *row_cmat_indices; 1327 PetscScalar *row_cmat_values; 1328 PetscInt *vertices; 1329 1330 PetscFunctionBegin; 1331 /* Set Non-overlapping dimensions */ 1332 n_B = pcis->n_B; n_D = pcis->n - n_B; 1333 1334 /* transform local matrices if needed */ 1335 if (pcbddc->use_change_of_basis) { 1336 Mat change_mat_all; 1337 PetscInt *nnz,*is_indices,*temp_indices; 1338 1339 ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&nnz);CHKERRQ(ierr); 1340 ierr = ISGetIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1341 for (i=0;i<n_D;i++) nnz[is_indices[i]] = 1; 1342 ierr = ISRestoreIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1343 ierr = ISGetIndices(pcis->is_B_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1344 k=1; 1345 for (i=0;i<n_B;i++) { 1346 ierr = MatGetRow(pcbddc->ChangeOfBasisMatrix,i,&j,NULL,NULL);CHKERRQ(ierr); 1347 nnz[is_indices[i]]=j; 1348 if (k < j) k = j; 1349 ierr = MatRestoreRow(pcbddc->ChangeOfBasisMatrix,i,&j,NULL,NULL);CHKERRQ(ierr); 1350 } 1351 ierr = 
ISRestoreIndices(pcis->is_B_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1352 /* assemble change of basis matrix on the whole set of local dofs */ 1353 ierr = PetscMalloc(k*sizeof(PetscInt),&temp_indices);CHKERRQ(ierr); 1354 ierr = MatCreate(PETSC_COMM_SELF,&change_mat_all);CHKERRQ(ierr); 1355 ierr = MatSetSizes(change_mat_all,pcis->n,pcis->n,pcis->n,pcis->n);CHKERRQ(ierr); 1356 ierr = MatSetType(change_mat_all,MATSEQAIJ);CHKERRQ(ierr); 1357 ierr = MatSeqAIJSetPreallocation(change_mat_all,0,nnz);CHKERRQ(ierr); 1358 ierr = ISGetIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1359 for (i=0;i<n_D;i++) { 1360 ierr = MatSetValue(change_mat_all,is_indices[i],is_indices[i],1.0,INSERT_VALUES);CHKERRQ(ierr); 1361 } 1362 ierr = ISRestoreIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1363 ierr = ISGetIndices(pcis->is_B_local,(const PetscInt**)&is_indices);CHKERRQ(ierr); 1364 for (i=0;i<n_B;i++) { 1365 ierr = MatGetRow(pcbddc->ChangeOfBasisMatrix,i,&j,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr); 1366 for (k=0; k<j; k++) temp_indices[k]=is_indices[row_cmat_indices[k]]; 1367 ierr = MatSetValues(change_mat_all,1,&is_indices[i],j,temp_indices,row_cmat_values,INSERT_VALUES);CHKERRQ(ierr); 1368 ierr = MatRestoreRow(pcbddc->ChangeOfBasisMatrix,i,&j,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr); 1369 } 1370 ierr = MatAssemblyBegin(change_mat_all,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1371 ierr = MatAssemblyEnd(change_mat_all,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1372 /* TODO: HOW TO WORK WITH BAIJ? 
PtAP not provided */ 1373 ierr = MatGetBlockSize(matis->A,&i);CHKERRQ(ierr); 1374 if (i==1) { 1375 ierr = MatPtAP(matis->A,change_mat_all,MAT_INITIAL_MATRIX,1.0,&pcbddc->local_mat);CHKERRQ(ierr); 1376 } else { 1377 Mat work_mat; 1378 ierr = MatConvert(matis->A,MATSEQAIJ,MAT_INITIAL_MATRIX,&work_mat);CHKERRQ(ierr); 1379 ierr = MatPtAP(work_mat,change_mat_all,MAT_INITIAL_MATRIX,1.0,&pcbddc->local_mat);CHKERRQ(ierr); 1380 ierr = MatDestroy(&work_mat);CHKERRQ(ierr); 1381 } 1382 ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr); 1383 ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr); 1384 ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr); 1385 ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr); 1386 ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr); 1387 ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr); 1388 ierr = MatDestroy(&change_mat_all);CHKERRQ(ierr); 1389 ierr = PetscFree(nnz);CHKERRQ(ierr); 1390 ierr = PetscFree(temp_indices);CHKERRQ(ierr); 1391 } else { 1392 /* without change of basis, the local matrix is unchanged */ 1393 ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr); 1394 pcbddc->local_mat = matis->A; 1395 } 1396 /* Change global null space passed in by the user if change of basis has been requested */ 1397 if (pcbddc->NullSpace && pcbddc->use_change_of_basis) { 1398 ierr = PCBDDCNullSpaceAdaptGlobal(pc);CHKERRQ(ierr); 1399 } 1400 1401 /* Set types for local objects needed by BDDC precondtioner */ 1402 impMatType = MATSEQDENSE; 1403 impVecType = VECSEQ; 1404 /* get vertex indices from constraint matrix */ 1405 ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&n_vertices,&vertices);CHKERRQ(ierr); 1406 /* Set number of constraints */ 1407 n_constraints = pcbddc->local_primal_size-n_vertices; 1408 /* Dohrmann's notation: dofs splitted in R (Remaining: all 
dofs but the vertices) and V (Vertices) */ 1409 ierr = VecSet(pcis->vec1_N,one);CHKERRQ(ierr); 1410 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1411 for (i=0;i<n_vertices;i++) array[vertices[i]] = zero; 1412 ierr = PetscMalloc((pcis->n-n_vertices)*sizeof(PetscInt),&idx_R_local);CHKERRQ(ierr); 1413 for (i=0, n_R=0; i<pcis->n; i++) { 1414 if (array[i] == one) { 1415 idx_R_local[n_R] = i; 1416 n_R++; 1417 } 1418 } 1419 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1420 ierr = PetscFree(vertices);CHKERRQ(ierr); 1421 if (dbg_flag) { 1422 ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr); 1423 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1424 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d local dimensions\n",PetscGlobalRank);CHKERRQ(ierr); 1425 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_size = %d, dirichlet_size = %d, boundary_size = %d\n",pcis->n,n_D,n_B);CHKERRQ(ierr); 1426 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"r_size = %d, v_size = %d, constraints = %d, local_primal_size = %d\n",n_R,n_vertices,n_constraints,pcbddc->local_primal_size);CHKERRQ(ierr); 1427 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"pcbddc->n_vertices = %d, pcbddc->n_constraints = %d\n",pcbddc->n_vertices,pcbddc->n_constraints);CHKERRQ(ierr); 1428 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1429 } 1430 1431 /* Allocate needed vectors */ 1432 ierr = VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);CHKERRQ(ierr); 1433 ierr = VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);CHKERRQ(ierr); 1434 ierr = VecDuplicate(pcis->vec1_D,&pcbddc->vec4_D);CHKERRQ(ierr); 1435 ierr = VecCreate(PETSC_COMM_SELF,&pcbddc->vec1_R);CHKERRQ(ierr); 1436 ierr = VecSetSizes(pcbddc->vec1_R,n_R,n_R);CHKERRQ(ierr); 1437 ierr = VecSetType(pcbddc->vec1_R,impVecType);CHKERRQ(ierr); 1438 ierr = VecDuplicate(pcbddc->vec1_R,&pcbddc->vec2_R);CHKERRQ(ierr); 1439 ierr = 
VecCreate(PETSC_COMM_SELF,&pcbddc->vec1_P);CHKERRQ(ierr); 1440 ierr = VecSetSizes(pcbddc->vec1_P,pcbddc->local_primal_size,pcbddc->local_primal_size);CHKERRQ(ierr); 1441 ierr = VecSetType(pcbddc->vec1_P,impVecType);CHKERRQ(ierr); 1442 1443 /* Creating some index sets needed */ 1444 /* For submatrices */ 1445 ierr = ISCreateGeneral(PETSC_COMM_SELF,n_R,idx_R_local,PETSC_OWN_POINTER,&is_R_local);CHKERRQ(ierr); 1446 1447 /* For VecScatters pcbddc->R_to_B and (optionally) pcbddc->R_to_D */ 1448 { 1449 IS is_aux1,is_aux2; 1450 PetscInt *aux_array1; 1451 PetscInt *aux_array2; 1452 PetscInt *idx_I_local; 1453 1454 ierr = PetscMalloc((pcis->n_B-n_vertices)*sizeof(PetscInt),&aux_array1);CHKERRQ(ierr); 1455 ierr = PetscMalloc((pcis->n_B-n_vertices)*sizeof(PetscInt),&aux_array2);CHKERRQ(ierr); 1456 1457 ierr = ISGetIndices(pcis->is_I_local,(const PetscInt**)&idx_I_local);CHKERRQ(ierr); 1458 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1459 for (i=0; i<n_D; i++) array[idx_I_local[i]] = 0; 1460 ierr = ISRestoreIndices(pcis->is_I_local,(const PetscInt**)&idx_I_local);CHKERRQ(ierr); 1461 for (i=0, j=0; i<n_R; i++) { 1462 if (array[idx_R_local[i]] == one) { 1463 aux_array1[j] = i; 1464 j++; 1465 } 1466 } 1467 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1468 ierr = ISCreateGeneral(PETSC_COMM_SELF,j,aux_array1,PETSC_COPY_VALUES,&is_aux1);CHKERRQ(ierr); 1469 ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1470 ierr = VecScatterEnd (pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1471 ierr = VecGetArray(pcis->vec1_B,&array);CHKERRQ(ierr); 1472 for (i=0, j=0; i<n_B; i++) { 1473 if (array[i] == one) { 1474 aux_array2[j] = i; j++; 1475 } 1476 } 1477 ierr = VecRestoreArray(pcis->vec1_B,&array);CHKERRQ(ierr); 1478 ierr = ISCreateGeneral(PETSC_COMM_SELF,j,aux_array2,PETSC_COPY_VALUES,&is_aux2);CHKERRQ(ierr); 1479 ierr = 
VecScatterCreate(pcbddc->vec1_R,is_aux1,pcis->vec1_B,is_aux2,&pcbddc->R_to_B);CHKERRQ(ierr); 1480 ierr = PetscFree(aux_array1);CHKERRQ(ierr); 1481 ierr = PetscFree(aux_array2);CHKERRQ(ierr); 1482 ierr = ISDestroy(&is_aux1);CHKERRQ(ierr); 1483 ierr = ISDestroy(&is_aux2);CHKERRQ(ierr); 1484 1485 if (pcbddc->inexact_prec_type || dbg_flag ) { 1486 ierr = PetscMalloc(n_D*sizeof(PetscInt),&aux_array1);CHKERRQ(ierr); 1487 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1488 for (i=0, j=0; i<n_R; i++) { 1489 if (array[idx_R_local[i]] == zero) { 1490 aux_array1[j] = i; 1491 j++; 1492 } 1493 } 1494 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1495 ierr = ISCreateGeneral(PETSC_COMM_SELF,j,aux_array1,PETSC_COPY_VALUES,&is_aux1);CHKERRQ(ierr); 1496 ierr = VecScatterCreate(pcbddc->vec1_R,is_aux1,pcis->vec1_D,(IS)0,&pcbddc->R_to_D);CHKERRQ(ierr); 1497 ierr = PetscFree(aux_array1);CHKERRQ(ierr); 1498 ierr = ISDestroy(&is_aux1);CHKERRQ(ierr); 1499 } 1500 } 1501 1502 /* Creating PC contexts for local Dirichlet and Neumann problems */ 1503 { 1504 Mat A_RR; 1505 PC pc_temp; 1506 MatStructure matstruct; 1507 /* Matrix for Dirichlet problem is A_II */ 1508 /* HACK (TODO) A_II can be changed between nonlinear iterations */ 1509 if (pc->setupcalled) { /* we dont need to rebuild dirichlet problem the first time we build BDDC */ 1510 ierr = PCGetOperators(pc,NULL,NULL,&matstruct);CHKERRQ(ierr); 1511 if (matstruct == SAME_NONZERO_PATTERN) { 1512 ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_REUSE_MATRIX,&pcis->A_II);CHKERRQ(ierr); 1513 } else { 1514 ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr); 1515 ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr); 1516 } 1517 } 1518 ierr = KSPCreate(PETSC_COMM_SELF,&pcbddc->ksp_D);CHKERRQ(ierr); 1519 ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr); 1520 ierr = 
KSPSetOperators(pcbddc->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr); 1521 ierr = KSPSetType(pcbddc->ksp_D,KSPPREONLY);CHKERRQ(ierr); 1522 ierr = KSPSetOptionsPrefix(pcbddc->ksp_D,"dirichlet_");CHKERRQ(ierr); 1523 /* default */ 1524 ierr = KSPGetPC(pcbddc->ksp_D,&pc_temp);CHKERRQ(ierr); 1525 ierr = PCSetType(pc_temp,PCLU);CHKERRQ(ierr); 1526 /* Allow user's customization */ 1527 ierr = KSPSetFromOptions(pcbddc->ksp_D);CHKERRQ(ierr); 1528 /* umfpack interface has a bug when matrix dimension is zero */ 1529 if (!n_D) { 1530 ierr = PCSetType(pc_temp,PCNONE);CHKERRQ(ierr); 1531 } 1532 /* Set Up KSP for Dirichlet problem of BDDC */ 1533 ierr = KSPSetUp(pcbddc->ksp_D);CHKERRQ(ierr); 1534 /* set ksp_D into pcis data */ 1535 ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr); 1536 ierr = PetscObjectReference((PetscObject)pcbddc->ksp_D);CHKERRQ(ierr); 1537 pcis->ksp_D = pcbddc->ksp_D; 1538 /* Matrix for Neumann problem is A_RR -> we need to create it */ 1539 ierr = MatGetSubMatrix(pcbddc->local_mat,is_R_local,is_R_local,MAT_INITIAL_MATRIX,&A_RR);CHKERRQ(ierr); 1540 ierr = KSPCreate(PETSC_COMM_SELF,&pcbddc->ksp_R);CHKERRQ(ierr); 1541 ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->ksp_R,(PetscObject)pc,1);CHKERRQ(ierr); 1542 ierr = KSPSetOperators(pcbddc->ksp_R,A_RR,A_RR,SAME_PRECONDITIONER);CHKERRQ(ierr); 1543 ierr = KSPSetType(pcbddc->ksp_R,KSPPREONLY);CHKERRQ(ierr); 1544 ierr = KSPSetOptionsPrefix(pcbddc->ksp_R,"neumann_");CHKERRQ(ierr); 1545 /* default */ 1546 ierr = KSPGetPC(pcbddc->ksp_R,&pc_temp);CHKERRQ(ierr); 1547 ierr = PCSetType(pc_temp,PCLU);CHKERRQ(ierr); 1548 /* Allow user's customization */ 1549 ierr = KSPSetFromOptions(pcbddc->ksp_R);CHKERRQ(ierr); 1550 /* umfpack interface has a bug when matrix dimension is zero */ 1551 if (!n_R) { 1552 ierr = PCSetType(pc_temp,PCNONE);CHKERRQ(ierr); 1553 } 1554 /* Set Up KSP for Neumann problem of BDDC */ 1555 ierr = KSPSetUp(pcbddc->ksp_R);CHKERRQ(ierr); 1556 /* check Dirichlet and Neumann solvers and 
adapt them if a nullspace correction is needed */ 1557 { 1558 Vec temp_vec; 1559 PetscReal value; 1560 PetscInt use_exact,use_exact_reduced; 1561 1562 ierr = VecDuplicate(pcis->vec1_D,&temp_vec);CHKERRQ(ierr); 1563 ierr = VecSetRandom(pcis->vec1_D,NULL);CHKERRQ(ierr); 1564 ierr = MatMult(pcis->A_II,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr); 1565 ierr = KSPSolve(pcbddc->ksp_D,pcis->vec2_D,temp_vec);CHKERRQ(ierr); 1566 ierr = VecAXPY(temp_vec,m_one,pcis->vec1_D);CHKERRQ(ierr); 1567 ierr = VecNorm(temp_vec,NORM_INFINITY,&value);CHKERRQ(ierr); 1568 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr); 1569 use_exact = 1; 1570 if (PetscAbsReal(value) > 1.e-4) use_exact = 0; 1571 ierr = MPI_Allreduce(&use_exact,&use_exact_reduced,1,MPIU_INT,MPI_LAND,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 1572 ierr = PCBDDCSetUseExactDirichlet(pc,(PetscBool)use_exact_reduced);CHKERRQ(ierr); 1573 if (dbg_flag) { 1574 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1575 ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr); 1576 ierr = PetscViewerASCIIPrintf(viewer,"Checking solution of Dirichlet and Neumann problems\n");CHKERRQ(ierr); 1577 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d infinity error for Dirichlet solve = % 1.14e \n",PetscGlobalRank,value);CHKERRQ(ierr); 1578 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1579 } 1580 if (n_D && pcbddc->NullSpace && !use_exact_reduced && !pcbddc->inexact_prec_type) { 1581 ierr = PCBDDCNullSpaceAssembleCorrection(pc,pcis->is_I_local); 1582 } 1583 ierr = VecDuplicate(pcbddc->vec1_R,&temp_vec);CHKERRQ(ierr); 1584 ierr = VecSetRandom(pcbddc->vec1_R,NULL);CHKERRQ(ierr); 1585 ierr = MatMult(A_RR,pcbddc->vec1_R,pcbddc->vec2_R);CHKERRQ(ierr); 1586 ierr = KSPSolve(pcbddc->ksp_R,pcbddc->vec2_R,temp_vec);CHKERRQ(ierr); 1587 ierr = VecAXPY(temp_vec,m_one,pcbddc->vec1_R);CHKERRQ(ierr); 1588 ierr = VecNorm(temp_vec,NORM_INFINITY,&value);CHKERRQ(ierr); 1589 ierr = 
VecDestroy(&temp_vec);CHKERRQ(ierr); 1590 use_exact = 1; 1591 if (PetscAbsReal(value) > 1.e-4) use_exact = 0; 1592 ierr = MPI_Allreduce(&use_exact,&use_exact_reduced,1,MPIU_INT,MPI_LAND,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 1593 if (dbg_flag) { 1594 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d infinity error for Neumann solve = % 1.14e \n",PetscGlobalRank,value);CHKERRQ(ierr); 1595 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1596 } 1597 if (n_R && pcbddc->NullSpace && !use_exact_reduced) { /* is it the right logic? */ 1598 ierr = PCBDDCNullSpaceAssembleCorrection(pc,is_R_local); 1599 } 1600 } 1601 /* free Neumann problem's matrix */ 1602 ierr = MatDestroy(&A_RR);CHKERRQ(ierr); 1603 } 1604 1605 /* Assemble all remaining stuff needed to apply BDDC */ 1606 { 1607 Mat A_RV,A_VR,A_VV; 1608 Mat M1; 1609 Mat C_CR; 1610 Mat AUXMAT; 1611 Vec vec1_C; 1612 Vec vec2_C; 1613 Vec vec1_V; 1614 Vec vec2_V; 1615 IS is_C_local,is_V_local,is_aux1; 1616 ISLocalToGlobalMapping BtoNmap; 1617 PetscInt *nnz; 1618 PetscInt *idx_V_B; 1619 PetscInt *auxindices; 1620 PetscInt index; 1621 PetscScalar* array2; 1622 MatFactorInfo matinfo; 1623 1624 /* Allocating some extra storage just to be safe */ 1625 ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&nnz);CHKERRQ(ierr); 1626 ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&auxindices);CHKERRQ(ierr); 1627 for (i=0;i<pcis->n;i++) auxindices[i]=i; 1628 1629 ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&n_vertices,&vertices);CHKERRQ(ierr); 1630 /* vertices in boundary numbering */ 1631 ierr = PetscMalloc(n_vertices*sizeof(PetscInt),&idx_V_B);CHKERRQ(ierr); 1632 ierr = ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&BtoNmap);CHKERRQ(ierr); 1633 ierr = ISGlobalToLocalMappingApply(BtoNmap,IS_GTOLM_DROP,n_vertices,vertices,&i,idx_V_B);CHKERRQ(ierr); 1634 ierr = ISLocalToGlobalMappingDestroy(&BtoNmap);CHKERRQ(ierr); 1635 if (i != n_vertices) { 1636 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Error in boundary numbering for BDDC 
vertices! %d != %d\n",n_vertices,i); 1637 } 1638 1639 /* some work vectors on vertices and/or constraints */ 1640 if (n_vertices) { 1641 ierr = VecCreate(PETSC_COMM_SELF,&vec1_V);CHKERRQ(ierr); 1642 ierr = VecSetSizes(vec1_V,n_vertices,n_vertices);CHKERRQ(ierr); 1643 ierr = VecSetType(vec1_V,impVecType);CHKERRQ(ierr); 1644 ierr = VecDuplicate(vec1_V,&vec2_V);CHKERRQ(ierr); 1645 } 1646 if (n_constraints) { 1647 ierr = VecCreate(PETSC_COMM_SELF,&vec1_C);CHKERRQ(ierr); 1648 ierr = VecSetSizes(vec1_C,n_constraints,n_constraints);CHKERRQ(ierr); 1649 ierr = VecSetType(vec1_C,impVecType);CHKERRQ(ierr); 1650 ierr = VecDuplicate(vec1_C,&vec2_C);CHKERRQ(ierr); 1651 ierr = VecDuplicate(vec1_C,&pcbddc->vec1_C);CHKERRQ(ierr); 1652 } 1653 /* Precompute stuffs needed for preprocessing and application of BDDC*/ 1654 if (n_constraints) { 1655 ierr = MatCreate(PETSC_COMM_SELF,&pcbddc->local_auxmat2);CHKERRQ(ierr); 1656 ierr = MatSetSizes(pcbddc->local_auxmat2,n_R,n_constraints,n_R,n_constraints);CHKERRQ(ierr); 1657 ierr = MatSetType(pcbddc->local_auxmat2,impMatType);CHKERRQ(ierr); 1658 ierr = MatSeqDenseSetPreallocation(pcbddc->local_auxmat2,NULL);CHKERRQ(ierr); 1659 1660 /* Create Constraint matrix on R nodes: C_{CR} */ 1661 ierr = ISCreateStride(PETSC_COMM_SELF,n_constraints,n_vertices,1,&is_C_local);CHKERRQ(ierr); 1662 ierr = MatGetSubMatrix(pcbddc->ConstraintMatrix,is_C_local,is_R_local,MAT_INITIAL_MATRIX,&C_CR);CHKERRQ(ierr); 1663 ierr = ISDestroy(&is_C_local);CHKERRQ(ierr); 1664 1665 /* Assemble local_auxmat2 = - A_{RR}^{-1} C^T_{CR} needed by BDDC application */ 1666 for (i=0;i<n_constraints;i++) { 1667 ierr = VecSet(pcbddc->vec1_R,zero);CHKERRQ(ierr); 1668 /* Get row of constraint matrix in R numbering */ 1669 ierr = VecGetArray(pcbddc->vec1_R,&array);CHKERRQ(ierr); 1670 ierr = MatGetRow(C_CR,i,&size_of_constraint,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr); 1671 for (j=0;j<size_of_constraint;j++) array[row_cmat_indices[j]] = 
-row_cmat_values[j]; 1672 ierr = MatRestoreRow(C_CR,i,&size_of_constraint,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr); 1673 ierr = VecRestoreArray(pcbddc->vec1_R,&array);CHKERRQ(ierr); 1674 1675 /* Solve for row of constraint matrix in R numbering */ 1676 ierr = KSPSolve(pcbddc->ksp_R,pcbddc->vec1_R,pcbddc->vec2_R);CHKERRQ(ierr); 1677 1678 /* Set values */ 1679 ierr = VecGetArray(pcbddc->vec2_R,&array);CHKERRQ(ierr); 1680 ierr = MatSetValues(pcbddc->local_auxmat2,n_R,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1681 ierr = VecRestoreArray(pcbddc->vec2_R,&array);CHKERRQ(ierr); 1682 } 1683 ierr = MatAssemblyBegin(pcbddc->local_auxmat2,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1684 ierr = MatAssemblyEnd(pcbddc->local_auxmat2,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1685 1686 /* Assemble AUXMAT = ( LUFactor )( -C_{CR} A_{RR}^{-1} C^T_{CR} )^{-1} */ 1687 ierr = MatMatMult(C_CR,pcbddc->local_auxmat2,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AUXMAT);CHKERRQ(ierr); 1688 ierr = MatFactorInfoInitialize(&matinfo);CHKERRQ(ierr); 1689 ierr = ISCreateStride(PETSC_COMM_SELF,n_constraints,0,1,&is_aux1);CHKERRQ(ierr); 1690 ierr = MatLUFactor(AUXMAT,is_aux1,is_aux1,&matinfo);CHKERRQ(ierr); 1691 ierr = ISDestroy(&is_aux1);CHKERRQ(ierr); 1692 1693 /* Assemble explicitly M1 = ( C_{CR} A_{RR}^{-1} C^T_{CR} )^{-1} needed in preproc */ 1694 ierr = MatCreate(PETSC_COMM_SELF,&M1);CHKERRQ(ierr); 1695 ierr = MatSetSizes(M1,n_constraints,n_constraints,n_constraints,n_constraints);CHKERRQ(ierr); 1696 ierr = MatSetType(M1,impMatType);CHKERRQ(ierr); 1697 ierr = MatSeqDenseSetPreallocation(M1,NULL);CHKERRQ(ierr); 1698 for (i=0;i<n_constraints;i++) { 1699 ierr = VecSet(vec1_C,zero);CHKERRQ(ierr); 1700 ierr = VecSetValue(vec1_C,i,one,INSERT_VALUES);CHKERRQ(ierr); 1701 ierr = VecAssemblyBegin(vec1_C);CHKERRQ(ierr); 1702 ierr = VecAssemblyEnd(vec1_C);CHKERRQ(ierr); 1703 ierr = MatSolve(AUXMAT,vec1_C,vec2_C);CHKERRQ(ierr); 1704 ierr = VecScale(vec2_C,m_one);CHKERRQ(ierr); 
1705 ierr = VecGetArray(vec2_C,&array);CHKERRQ(ierr); 1706 ierr = MatSetValues(M1,n_constraints,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1707 ierr = VecRestoreArray(vec2_C,&array);CHKERRQ(ierr); 1708 } 1709 ierr = MatAssemblyBegin(M1,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1710 ierr = MatAssemblyEnd(M1,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1711 ierr = MatDestroy(&AUXMAT);CHKERRQ(ierr); 1712 /* Assemble local_auxmat1 = M1*C_{CR} needed by BDDC application in KSP and in preproc */ 1713 ierr = MatMatMult(M1,C_CR,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&pcbddc->local_auxmat1);CHKERRQ(ierr); 1714 1715 } 1716 1717 /* Get submatrices from subdomain matrix */ 1718 if (n_vertices) { 1719 ierr = ISCreateGeneral(PETSC_COMM_SELF,n_vertices,vertices,PETSC_OWN_POINTER,&is_V_local);CHKERRQ(ierr); 1720 ierr = MatGetSubMatrix(pcbddc->local_mat,is_R_local,is_V_local,MAT_INITIAL_MATRIX,&A_RV);CHKERRQ(ierr); 1721 ierr = MatGetSubMatrix(pcbddc->local_mat,is_V_local,is_R_local,MAT_INITIAL_MATRIX,&A_VR);CHKERRQ(ierr); 1722 ierr = MatGetSubMatrix(pcbddc->local_mat,is_V_local,is_V_local,MAT_INITIAL_MATRIX,&A_VV);CHKERRQ(ierr); 1723 } 1724 1725 /* Matrix of coarse basis functions (local) */ 1726 ierr = MatCreate(PETSC_COMM_SELF,&pcbddc->coarse_phi_B);CHKERRQ(ierr); 1727 ierr = MatSetSizes(pcbddc->coarse_phi_B,n_B,pcbddc->local_primal_size,n_B,pcbddc->local_primal_size);CHKERRQ(ierr); 1728 ierr = MatSetType(pcbddc->coarse_phi_B,impMatType);CHKERRQ(ierr); 1729 ierr = MatSeqDenseSetPreallocation(pcbddc->coarse_phi_B,NULL);CHKERRQ(ierr); 1730 if (pcbddc->inexact_prec_type || dbg_flag ) { 1731 ierr = MatCreate(PETSC_COMM_SELF,&pcbddc->coarse_phi_D);CHKERRQ(ierr); 1732 ierr = MatSetSizes(pcbddc->coarse_phi_D,n_D,pcbddc->local_primal_size,n_D,pcbddc->local_primal_size);CHKERRQ(ierr); 1733 ierr = MatSetType(pcbddc->coarse_phi_D,impMatType);CHKERRQ(ierr); 1734 ierr = MatSeqDenseSetPreallocation(pcbddc->coarse_phi_D,NULL);CHKERRQ(ierr); 1735 } 1736 1737 if (dbg_flag) { 1738 ierr = PetscMalloc( 
pcbddc->local_primal_size*sizeof(PetscScalar),&coarsefunctions_errors);CHKERRQ(ierr); 1739 ierr = PetscMalloc( pcbddc->local_primal_size*sizeof(PetscScalar),&constraints_errors);CHKERRQ(ierr); 1740 } 1741 /* Subdomain contribution (Non-overlapping) to coarse matrix */ 1742 ierr = PetscMalloc ((pcbddc->local_primal_size)*(pcbddc->local_primal_size)*sizeof(PetscScalar),&coarse_submat_vals);CHKERRQ(ierr); 1743 1744 /* We are now ready to evaluate coarse basis functions and subdomain contribution to coarse problem */ 1745 for (i=0;i<n_vertices;i++){ 1746 ierr = VecSet(vec1_V,zero);CHKERRQ(ierr); 1747 ierr = VecSetValue(vec1_V,i,one,INSERT_VALUES);CHKERRQ(ierr); 1748 ierr = VecAssemblyBegin(vec1_V);CHKERRQ(ierr); 1749 ierr = VecAssemblyEnd(vec1_V);CHKERRQ(ierr); 1750 /* solution of saddle point problem */ 1751 ierr = MatMult(A_RV,vec1_V,pcbddc->vec1_R);CHKERRQ(ierr); 1752 ierr = KSPSolve(pcbddc->ksp_R,pcbddc->vec1_R,pcbddc->vec1_R);CHKERRQ(ierr); 1753 ierr = VecScale(pcbddc->vec1_R,m_one);CHKERRQ(ierr); 1754 if (n_constraints) { 1755 ierr = MatMult(pcbddc->local_auxmat1,pcbddc->vec1_R,vec1_C);CHKERRQ(ierr); 1756 ierr = MatMultAdd(pcbddc->local_auxmat2,vec1_C,pcbddc->vec1_R,pcbddc->vec1_R);CHKERRQ(ierr); 1757 ierr = VecScale(vec1_C,m_one);CHKERRQ(ierr); 1758 } 1759 ierr = MatMult(A_VR,pcbddc->vec1_R,vec2_V);CHKERRQ(ierr); 1760 ierr = MatMultAdd(A_VV,vec1_V,vec2_V,vec2_V);CHKERRQ(ierr); 1761 1762 /* Set values in coarse basis function and subdomain part of coarse_mat */ 1763 /* coarse basis functions */ 1764 ierr = VecSet(pcis->vec1_B,zero);CHKERRQ(ierr); 1765 ierr = VecScatterBegin(pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1766 ierr = VecScatterEnd (pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1767 ierr = VecGetArray(pcis->vec1_B,&array);CHKERRQ(ierr); 1768 ierr = MatSetValues(pcbddc->coarse_phi_B,n_B,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1769 ierr = 
VecRestoreArray(pcis->vec1_B,&array);CHKERRQ(ierr); 1770 ierr = MatSetValue(pcbddc->coarse_phi_B,idx_V_B[i],i,one,INSERT_VALUES);CHKERRQ(ierr); 1771 if ( pcbddc->inexact_prec_type || dbg_flag ) { 1772 ierr = VecScatterBegin(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1773 ierr = VecScatterEnd(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1774 ierr = VecGetArray(pcis->vec1_D,&array);CHKERRQ(ierr); 1775 ierr = MatSetValues(pcbddc->coarse_phi_D,n_D,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr); 1776 ierr = VecRestoreArray(pcis->vec1_D,&array);CHKERRQ(ierr); 1777 } 1778 /* subdomain contribution to coarse matrix */ 1779 ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr); 1780 for (j=0; j<n_vertices; j++) coarse_submat_vals[i*pcbddc->local_primal_size+j] = array[j]; /* WARNING -> column major ordering */ 1781 ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr); 1782 if (n_constraints) { 1783 ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr); 1784 for (j=0; j<n_constraints; j++) coarse_submat_vals[i*pcbddc->local_primal_size+j+n_vertices] = array[j]; /* WARNING -> column major ordering */ 1785 ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr); 1786 } 1787 1788 if ( dbg_flag ) { 1789 /* assemble subdomain vector on nodes */ 1790 ierr = VecSet(pcis->vec1_N,zero);CHKERRQ(ierr); 1791 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1792 ierr = VecGetArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr); 1793 for (j=0;j<n_R;j++) array[idx_R_local[j]] = array2[j]; 1794 array[ vertices[i] ] = one; 1795 ierr = VecRestoreArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr); 1796 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1797 /* assemble subdomain vector of lagrange multipliers (i.e. 
primal nodes) */ 1798 ierr = VecSet(pcbddc->vec1_P,zero);CHKERRQ(ierr); 1799 ierr = VecGetArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr); 1800 ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr); 1801 for (j=0;j<n_vertices;j++) array2[j]=array[j]; 1802 ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr); 1803 if (n_constraints) { 1804 ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr); 1805 for (j=0;j<n_constraints;j++) array2[j+n_vertices]=array[j]; 1806 ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr); 1807 } 1808 ierr = VecRestoreArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr); 1809 ierr = VecScale(pcbddc->vec1_P,m_one);CHKERRQ(ierr); 1810 /* check saddle point solution */ 1811 ierr = MatMult(pcbddc->local_mat,pcis->vec1_N,pcis->vec2_N);CHKERRQ(ierr); 1812 ierr = MatMultTransposeAdd(pcbddc->ConstraintMatrix,pcbddc->vec1_P,pcis->vec2_N,pcis->vec2_N);CHKERRQ(ierr); 1813 ierr = VecNorm(pcis->vec2_N,NORM_INFINITY,&coarsefunctions_errors[i]);CHKERRQ(ierr); 1814 ierr = MatMult(pcbddc->ConstraintMatrix,pcis->vec1_N,pcbddc->vec1_P);CHKERRQ(ierr); 1815 ierr = VecGetArray(pcbddc->vec1_P,&array);CHKERRQ(ierr); 1816 array[i]=array[i]+m_one; /* shift by the identity matrix */ 1817 ierr = VecRestoreArray(pcbddc->vec1_P,&array);CHKERRQ(ierr); 1818 ierr = VecNorm(pcbddc->vec1_P,NORM_INFINITY,&constraints_errors[i]);CHKERRQ(ierr); 1819 } 1820 } 1821 ierr = PetscFree(idx_V_B);CHKERRQ(ierr); 1822 1823 for (i=0;i<n_constraints;i++){ 1824 ierr = VecSet(vec2_C,zero);CHKERRQ(ierr); 1825 ierr = VecSetValue(vec2_C,i,m_one,INSERT_VALUES);CHKERRQ(ierr); 1826 ierr = VecAssemblyBegin(vec2_C);CHKERRQ(ierr); 1827 ierr = VecAssemblyEnd(vec2_C);CHKERRQ(ierr); 1828 /* solution of saddle point problem */ 1829 ierr = MatMult(M1,vec2_C,vec1_C);CHKERRQ(ierr); 1830 ierr = MatMult(pcbddc->local_auxmat2,vec1_C,pcbddc->vec1_R);CHKERRQ(ierr); 1831 ierr = VecScale(vec1_C,m_one);CHKERRQ(ierr); 1832 if (n_vertices) { ierr = MatMult(A_VR,pcbddc->vec1_R,vec2_V);CHKERRQ(ierr); } 1833 /* Set values in coarse basis 
function and subdomain part of coarse_mat */ 1834 /* coarse basis functions */ 1835 index=i+n_vertices; 1836 ierr = VecSet(pcis->vec1_B,zero);CHKERRQ(ierr); 1837 ierr = VecScatterBegin(pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1838 ierr = VecScatterEnd (pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1839 ierr = VecGetArray(pcis->vec1_B,&array);CHKERRQ(ierr); 1840 ierr = MatSetValues(pcbddc->coarse_phi_B,n_B,auxindices,1,&index,array,INSERT_VALUES);CHKERRQ(ierr); 1841 ierr = VecRestoreArray(pcis->vec1_B,&array);CHKERRQ(ierr); 1842 if ( pcbddc->inexact_prec_type || dbg_flag ) { 1843 ierr = VecScatterBegin(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1844 ierr = VecScatterEnd(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1845 ierr = VecGetArray(pcis->vec1_D,&array);CHKERRQ(ierr); 1846 ierr = MatSetValues(pcbddc->coarse_phi_D,n_D,auxindices,1,&index,array,INSERT_VALUES);CHKERRQ(ierr); 1847 ierr = VecRestoreArray(pcis->vec1_D,&array);CHKERRQ(ierr); 1848 } 1849 /* subdomain contribution to coarse matrix */ 1850 if (n_vertices) { 1851 ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr); 1852 for (j=0; j<n_vertices; j++) coarse_submat_vals[index*pcbddc->local_primal_size+j]=array[j]; /* WARNING -> column major ordering */ 1853 ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr); 1854 } 1855 ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr); 1856 for (j=0; j<n_constraints; j++) coarse_submat_vals[index*pcbddc->local_primal_size+j+n_vertices]=array[j]; /* WARNING -> column major ordering */ 1857 ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr); 1858 1859 if ( dbg_flag ) { 1860 /* assemble subdomain vector on nodes */ 1861 ierr = VecSet(pcis->vec1_N,zero);CHKERRQ(ierr); 1862 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1863 ierr = VecGetArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr); 1864 for 
(j=0;j<n_R;j++) array[idx_R_local[j]] = array2[j]; 1865 ierr = VecRestoreArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr); 1866 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 1867 /* assemble subdomain vector of lagrange multipliers */ 1868 ierr = VecSet(pcbddc->vec1_P,zero);CHKERRQ(ierr); 1869 ierr = VecGetArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr); 1870 if ( n_vertices) { 1871 ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr); 1872 for (j=0;j<n_vertices;j++) array2[j]=-array[j]; 1873 ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr); 1874 } 1875 ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr); 1876 for (j=0;j<n_constraints;j++) {array2[j+n_vertices]=-array[j];} 1877 ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr); 1878 ierr = VecRestoreArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr); 1879 /* check saddle point solution */ 1880 ierr = MatMult(pcbddc->local_mat,pcis->vec1_N,pcis->vec2_N);CHKERRQ(ierr); 1881 ierr = MatMultTransposeAdd(pcbddc->ConstraintMatrix,pcbddc->vec1_P,pcis->vec2_N,pcis->vec2_N);CHKERRQ(ierr); 1882 ierr = VecNorm(pcis->vec2_N,NORM_INFINITY,&coarsefunctions_errors[index]);CHKERRQ(ierr); 1883 ierr = MatMult(pcbddc->ConstraintMatrix,pcis->vec1_N,pcbddc->vec1_P);CHKERRQ(ierr); 1884 ierr = VecGetArray(pcbddc->vec1_P,&array);CHKERRQ(ierr); 1885 array[index]=array[index]+m_one; /* shift by the identity matrix */ 1886 ierr = VecRestoreArray(pcbddc->vec1_P,&array);CHKERRQ(ierr); 1887 ierr = VecNorm(pcbddc->vec1_P,NORM_INFINITY,&constraints_errors[index]);CHKERRQ(ierr); 1888 } 1889 } 1890 ierr = MatAssemblyBegin(pcbddc->coarse_phi_B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1891 ierr = MatAssemblyEnd (pcbddc->coarse_phi_B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1892 if ( pcbddc->inexact_prec_type || dbg_flag ) { 1893 ierr = MatAssemblyBegin(pcbddc->coarse_phi_D,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1894 ierr = MatAssemblyEnd (pcbddc->coarse_phi_D,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1895 } 1896 /* Checking coarse_sub_mat and coarse basis functios */ 1897 /* It 
shuld be \Phi^{(j)^T} A^{(j)} \Phi^{(j)}=coarse_sub_mat */ 1898 if (dbg_flag) { 1899 Mat coarse_sub_mat; 1900 Mat TM1,TM2,TM3,TM4; 1901 Mat coarse_phi_D,coarse_phi_B,A_II,A_BB,A_IB,A_BI; 1902 MatType checkmattype=MATSEQAIJ; 1903 PetscScalar value; 1904 1905 ierr = MatConvert(pcis->A_II,checkmattype,MAT_INITIAL_MATRIX,&A_II);CHKERRQ(ierr); 1906 ierr = MatConvert(pcis->A_IB,checkmattype,MAT_INITIAL_MATRIX,&A_IB);CHKERRQ(ierr); 1907 ierr = MatConvert(pcis->A_BI,checkmattype,MAT_INITIAL_MATRIX,&A_BI);CHKERRQ(ierr); 1908 ierr = MatConvert(pcis->A_BB,checkmattype,MAT_INITIAL_MATRIX,&A_BB);CHKERRQ(ierr); 1909 ierr = MatConvert(pcbddc->coarse_phi_D,checkmattype,MAT_INITIAL_MATRIX,&coarse_phi_D);CHKERRQ(ierr); 1910 ierr = MatConvert(pcbddc->coarse_phi_B,checkmattype,MAT_INITIAL_MATRIX,&coarse_phi_B);CHKERRQ(ierr); 1911 ierr = MatCreateSeqDense(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_size,coarse_submat_vals,&coarse_sub_mat);CHKERRQ(ierr); 1912 ierr = MatConvert(coarse_sub_mat,checkmattype,MAT_REUSE_MATRIX,&coarse_sub_mat);CHKERRQ(ierr); 1913 1914 ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr); 1915 ierr = PetscViewerASCIIPrintf(viewer,"Check coarse sub mat and local basis functions\n");CHKERRQ(ierr); 1916 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1917 ierr = MatPtAP(A_II,coarse_phi_D,MAT_INITIAL_MATRIX,1.0,&TM1);CHKERRQ(ierr); 1918 ierr = MatPtAP(A_BB,coarse_phi_B,MAT_INITIAL_MATRIX,1.0,&TM2);CHKERRQ(ierr); 1919 ierr = MatMatMult(A_IB,coarse_phi_B,MAT_INITIAL_MATRIX,1.0,&AUXMAT);CHKERRQ(ierr); 1920 ierr = MatTransposeMatMult(coarse_phi_D,AUXMAT,MAT_INITIAL_MATRIX,1.0,&TM3);CHKERRQ(ierr); 1921 ierr = MatDestroy(&AUXMAT);CHKERRQ(ierr); 1922 ierr = MatMatMult(A_BI,coarse_phi_D,MAT_INITIAL_MATRIX,1.0,&AUXMAT);CHKERRQ(ierr); 1923 ierr = MatTransposeMatMult(coarse_phi_B,AUXMAT,MAT_INITIAL_MATRIX,1.0,&TM4);CHKERRQ(ierr); 1924 ierr = MatDestroy(&AUXMAT);CHKERRQ(ierr); 1925 ierr = 
MatAXPY(TM1,one,TM2,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr); 1926 ierr = MatAXPY(TM1,one,TM3,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr); 1927 ierr = MatAXPY(TM1,one,TM4,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr); 1928 ierr = MatAXPY(TM1,m_one,coarse_sub_mat,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr); 1929 ierr = MatNorm(TM1,NORM_INFINITY,&value);CHKERRQ(ierr); 1930 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"----------------------------------\n");CHKERRQ(ierr); 1931 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d \n",PetscGlobalRank);CHKERRQ(ierr); 1932 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"matrix error = % 1.14e\n",value);CHKERRQ(ierr); 1933 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"coarse functions errors\n");CHKERRQ(ierr); 1934 for (i=0;i<pcbddc->local_primal_size;i++) { ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local %02d-th function error = % 1.14e\n",i,coarsefunctions_errors[i]);CHKERRQ(ierr); } 1935 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"constraints errors\n");CHKERRQ(ierr); 1936 for (i=0;i<pcbddc->local_primal_size;i++) { ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local %02d-th function error = % 1.14e\n",i,constraints_errors[i]);CHKERRQ(ierr); } 1937 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1938 ierr = MatDestroy(&A_II);CHKERRQ(ierr); 1939 ierr = MatDestroy(&A_BB);CHKERRQ(ierr); 1940 ierr = MatDestroy(&A_IB);CHKERRQ(ierr); 1941 ierr = MatDestroy(&A_BI);CHKERRQ(ierr); 1942 ierr = MatDestroy(&TM1);CHKERRQ(ierr); 1943 ierr = MatDestroy(&TM2);CHKERRQ(ierr); 1944 ierr = MatDestroy(&TM3);CHKERRQ(ierr); 1945 ierr = MatDestroy(&TM4);CHKERRQ(ierr); 1946 ierr = MatDestroy(&coarse_phi_D);CHKERRQ(ierr); 1947 ierr = MatDestroy(&coarse_sub_mat);CHKERRQ(ierr); 1948 ierr = MatDestroy(&coarse_phi_B);CHKERRQ(ierr); 1949 ierr = PetscFree(coarsefunctions_errors);CHKERRQ(ierr); 1950 ierr = PetscFree(constraints_errors);CHKERRQ(ierr); 1951 } 1952 /* free memory */ 1953 if (n_vertices) { 1954 ierr = 
ISDestroy(&is_V_local);CHKERRQ(ierr); 1955 ierr = VecDestroy(&vec1_V);CHKERRQ(ierr); 1956 ierr = VecDestroy(&vec2_V);CHKERRQ(ierr); 1957 ierr = MatDestroy(&A_RV);CHKERRQ(ierr); 1958 ierr = MatDestroy(&A_VR);CHKERRQ(ierr); 1959 ierr = MatDestroy(&A_VV);CHKERRQ(ierr); 1960 } 1961 if (n_constraints) { 1962 ierr = VecDestroy(&vec1_C);CHKERRQ(ierr); 1963 ierr = VecDestroy(&vec2_C);CHKERRQ(ierr); 1964 ierr = MatDestroy(&M1);CHKERRQ(ierr); 1965 ierr = MatDestroy(&C_CR);CHKERRQ(ierr); 1966 } 1967 ierr = PetscFree(auxindices);CHKERRQ(ierr); 1968 ierr = PetscFree(nnz);CHKERRQ(ierr); 1969 /* create coarse matrix and data structures for message passing associated actual choice of coarse problem type */ 1970 ierr = PCBDDCSetUpCoarseEnvironment(pc,coarse_submat_vals);CHKERRQ(ierr); 1971 ierr = PetscFree(coarse_submat_vals);CHKERRQ(ierr); 1972 } 1973 /* free memory */ 1974 ierr = ISDestroy(&is_R_local);CHKERRQ(ierr); 1975 1976 PetscFunctionReturn(0); 1977 } 1978 1979 /* -------------------------------------------------------------------------- */ 1980 1981 /* BDDC requires metis 5.0.1 for multilevel */ 1982 #if defined(PETSC_HAVE_METIS) 1983 #include "metis.h" 1984 #define MetisInt idx_t 1985 #define MetisScalar real_t 1986 #endif 1987 1988 #undef __FUNCT__ 1989 #define __FUNCT__ "PCBDDCSetUpCoarseEnvironment" 1990 static PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC pc,PetscScalar* coarse_submat_vals) 1991 { 1992 1993 1994 Mat_IS *matis = (Mat_IS*)pc->pmat->data; 1995 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 1996 PC_IS *pcis = (PC_IS*)pc->data; 1997 MPI_Comm prec_comm; 1998 MPI_Comm coarse_comm; 1999 2000 MatNullSpace CoarseNullSpace; 2001 2002 /* common to all choiches */ 2003 PetscScalar *temp_coarse_mat_vals; 2004 PetscScalar *ins_coarse_mat_vals; 2005 PetscInt *ins_local_primal_indices; 2006 PetscMPIInt *localsizes2,*localdispl2; 2007 PetscMPIInt size_prec_comm; 2008 PetscMPIInt rank_prec_comm; 2009 PetscMPIInt active_rank=MPI_PROC_NULL; 2010 PetscMPIInt master_proc=0; 2011 
PetscInt ins_local_primal_size; 2012 /* specific to MULTILEVEL_BDDC */ 2013 PetscMPIInt *ranks_recv; 2014 PetscMPIInt count_recv=0; 2015 PetscMPIInt rank_coarse_proc_send_to; 2016 PetscMPIInt coarse_color = MPI_UNDEFINED; 2017 ISLocalToGlobalMapping coarse_ISLG; 2018 /* some other variables */ 2019 PetscErrorCode ierr; 2020 MatType coarse_mat_type; 2021 PCType coarse_pc_type; 2022 KSPType coarse_ksp_type; 2023 PC pc_temp; 2024 PetscInt i,j,k; 2025 PetscInt max_it_coarse_ksp=1; /* don't increase this value */ 2026 /* verbose output viewer */ 2027 PetscViewer viewer=pcbddc->dbg_viewer; 2028 PetscBool dbg_flag=pcbddc->dbg_flag; 2029 2030 PetscInt offset,offset2; 2031 PetscMPIInt im_active,active_procs; 2032 PetscInt *dnz,*onz; 2033 2034 PetscBool setsym,issym=PETSC_FALSE; 2035 2036 PetscFunctionBegin; 2037 ierr = PetscObjectGetComm((PetscObject)pc,&prec_comm);CHKERRQ(ierr); 2038 ins_local_primal_indices = 0; 2039 ins_coarse_mat_vals = 0; 2040 localsizes2 = 0; 2041 localdispl2 = 0; 2042 temp_coarse_mat_vals = 0; 2043 coarse_ISLG = 0; 2044 2045 ierr = MPI_Comm_size(prec_comm,&size_prec_comm);CHKERRQ(ierr); 2046 ierr = MPI_Comm_rank(prec_comm,&rank_prec_comm);CHKERRQ(ierr); 2047 ierr = MatIsSymmetricKnown(pc->pmat,&setsym,&issym);CHKERRQ(ierr); 2048 2049 /* Assign global numbering to coarse dofs */ 2050 { 2051 PetscInt *auxlocal_primal,*aux_idx; 2052 PetscMPIInt mpi_local_primal_size; 2053 PetscScalar coarsesum,*array; 2054 2055 mpi_local_primal_size = (PetscMPIInt)pcbddc->local_primal_size; 2056 2057 /* Construct needed data structures for message passing */ 2058 j = 0; 2059 if (rank_prec_comm == 0 || pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2060 j = size_prec_comm; 2061 } 2062 ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_sizes);CHKERRQ(ierr); 2063 ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr); 2064 /* Gather local_primal_size information for all 
processes */ 2065 if (pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2066 ierr = MPI_Allgather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,prec_comm);CHKERRQ(ierr); 2067 } else { 2068 ierr = MPI_Gather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,0,prec_comm);CHKERRQ(ierr); 2069 } 2070 pcbddc->replicated_primal_size = 0; 2071 for (i=0; i<j; i++) { 2072 pcbddc->local_primal_displacements[i] = pcbddc->replicated_primal_size ; 2073 pcbddc->replicated_primal_size += pcbddc->local_primal_sizes[i]; 2074 } 2075 2076 /* First let's count coarse dofs. 2077 This code fragment assumes that the number of local constraints per connected component 2078 is not greater than the number of nodes defined for the connected component 2079 (otherwise we will surely have linear dependence between constraints and thus a singular coarse problem) */ 2080 ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscInt),&auxlocal_primal);CHKERRQ(ierr); 2081 ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&i,&aux_idx);CHKERRQ(ierr); 2082 ierr = PetscMemcpy(auxlocal_primal,aux_idx,i*sizeof(PetscInt));CHKERRQ(ierr); 2083 ierr = PetscFree(aux_idx);CHKERRQ(ierr); 2084 ierr = PCBDDCGetPrimalConstraintsLocalIdx(pc,&j,&aux_idx);CHKERRQ(ierr); 2085 ierr = PetscMemcpy(&auxlocal_primal[i],aux_idx,j*sizeof(PetscInt));CHKERRQ(ierr); 2086 ierr = PetscFree(aux_idx);CHKERRQ(ierr); 2087 /* Compute number of coarse dofs */ 2088 ierr = PCBDDCSubsetNumbering(prec_comm,matis->mapping,pcbddc->local_primal_size,auxlocal_primal,NULL,&pcbddc->coarse_size,&pcbddc->local_primal_indices);CHKERRQ(ierr); 2089 2090 if (dbg_flag) { 2091 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2092 ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr); 2093 ierr = PetscViewerASCIIPrintf(viewer,"Check coarse indices\n");CHKERRQ(ierr); 2094 ierr = 
VecSet(pcis->vec1_N,0.0);CHKERRQ(ierr); 2095 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 2096 for (i=0;i<pcbddc->local_primal_size;i++) array[auxlocal_primal[i]]=1.0; 2097 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 2098 ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr); 2099 ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 2100 ierr = VecScatterEnd (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 2101 ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2102 ierr = VecScatterEnd (matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2103 ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr); 2104 for (i=0;i<pcis->n;i++) { 2105 if (array[i] == 1.0) { 2106 ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&j);CHKERRQ(ierr); 2107 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d: WRONG COARSE INDEX %d (local %d)\n",PetscGlobalRank,j,i);CHKERRQ(ierr); 2108 } 2109 } 2110 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2111 for (i=0;i<pcis->n;i++) { 2112 if (array[i] > 0.0) array[i] = 1.0/array[i]; 2113 } 2114 ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr); 2115 ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr); 2116 ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 2117 ierr = VecScatterEnd (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 2118 ierr = VecSum(pcis->vec1_global,&coarsesum);CHKERRQ(ierr); 2119 ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem SHOULD be %lf\n",coarsesum);CHKERRQ(ierr); 2120 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2121 } 2122 ierr = PetscFree(auxlocal_primal);CHKERRQ(ierr); 2123 } 2124 2125 if (dbg_flag) { 2126 ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem is 
%d\n",pcbddc->coarse_size);CHKERRQ(ierr); 2127 if (dbg_flag > 1) { 2128 ierr = PetscViewerASCIIPrintf(viewer,"Distribution of local primal indices\n");CHKERRQ(ierr); 2129 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2130 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d\n",PetscGlobalRank);CHKERRQ(ierr); 2131 for (i=0;i<pcbddc->local_primal_size;i++) { 2132 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_primal_indices[%d]=%d \n",i,pcbddc->local_primal_indices[i]); 2133 } 2134 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2135 } 2136 } 2137 2138 im_active = 0; 2139 if (pcis->n) im_active = 1; 2140 ierr = MPI_Allreduce(&im_active,&active_procs,1,MPIU_INT,MPI_SUM,prec_comm);CHKERRQ(ierr); 2141 2142 /* adapt coarse problem type */ 2143 #if defined(PETSC_HAVE_METIS) 2144 if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2145 if (pcbddc->current_level < pcbddc->max_levels) { 2146 if ( (active_procs/pcbddc->coarsening_ratio) < 2 ) { 2147 if (dbg_flag) { 2148 ierr = PetscViewerASCIIPrintf(viewer,"Not enough active processes on level %d (active %d,ratio %d). Parallel direct solve for coarse problem\n",pcbddc->current_level,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr); 2149 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2150 } 2151 pcbddc->coarse_problem_type = PARALLEL_BDDC; 2152 } 2153 } else { 2154 if (dbg_flag) { 2155 ierr = PetscViewerASCIIPrintf(viewer,"Max number of levels reached. 
Using parallel direct solve for coarse problem\n",pcbddc->max_levels,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr); 2156 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2157 } 2158 pcbddc->coarse_problem_type = PARALLEL_BDDC; 2159 } 2160 } 2161 #else 2162 pcbddc->coarse_problem_type = PARALLEL_BDDC; 2163 #endif 2164 2165 switch(pcbddc->coarse_problem_type){ 2166 2167 case(MULTILEVEL_BDDC): /* we define a coarse mesh where subdomains are elements */ 2168 { 2169 #if defined(PETSC_HAVE_METIS) 2170 /* we need additional variables */ 2171 MetisInt n_subdomains,n_parts,objval,ncon,faces_nvtxs; 2172 MetisInt *metis_coarse_subdivision; 2173 MetisInt options[METIS_NOPTIONS]; 2174 PetscMPIInt size_coarse_comm,rank_coarse_comm; 2175 PetscMPIInt procs_jumps_coarse_comm; 2176 PetscMPIInt *coarse_subdivision; 2177 PetscMPIInt *total_count_recv; 2178 PetscMPIInt *total_ranks_recv; 2179 PetscMPIInt *displacements_recv; 2180 PetscMPIInt *my_faces_connectivity; 2181 PetscMPIInt *petsc_faces_adjncy; 2182 MetisInt *faces_adjncy; 2183 MetisInt *faces_xadj; 2184 PetscMPIInt *number_of_faces; 2185 PetscMPIInt *faces_displacements; 2186 PetscInt *array_int; 2187 PetscMPIInt my_faces=0; 2188 PetscMPIInt total_faces=0; 2189 PetscInt ranks_stretching_ratio; 2190 2191 /* define some quantities */ 2192 pcbddc->coarse_communications_type = SCATTERS_BDDC; 2193 coarse_mat_type = MATIS; 2194 coarse_pc_type = PCBDDC; 2195 coarse_ksp_type = KSPRICHARDSON; 2196 2197 /* details of coarse decomposition */ 2198 n_subdomains = active_procs; 2199 n_parts = n_subdomains/pcbddc->coarsening_ratio; 2200 ranks_stretching_ratio = size_prec_comm/active_procs; 2201 procs_jumps_coarse_comm = pcbddc->coarsening_ratio*ranks_stretching_ratio; 2202 2203 #if 0 2204 PetscMPIInt *old_ranks; 2205 PetscInt *new_ranks,*jj,*ii; 2206 MatPartitioning mat_part; 2207 IS coarse_new_decomposition,is_numbering; 2208 PetscViewer viewer_test; 2209 MPI_Comm test_coarse_comm; 2210 PetscMPIInt test_coarse_color; 2211 Mat mat_adj; 
2212 /* Create new communicator for coarse problem splitting the old one */ 2213 /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards) 2214 key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */ 2215 test_coarse_color = ( im_active ? 0 : MPI_UNDEFINED ); 2216 test_coarse_comm = MPI_COMM_NULL; 2217 ierr = MPI_Comm_split(prec_comm,test_coarse_color,rank_prec_comm,&test_coarse_comm);CHKERRQ(ierr); 2218 if (im_active) { 2219 ierr = PetscMalloc(n_subdomains*sizeof(PetscMPIInt),&old_ranks); 2220 ierr = PetscMalloc(size_prec_comm*sizeof(PetscInt),&new_ranks); 2221 ierr = MPI_Comm_rank(test_coarse_comm,&rank_coarse_comm);CHKERRQ(ierr); 2222 ierr = MPI_Comm_size(test_coarse_comm,&j);CHKERRQ(ierr); 2223 ierr = MPI_Allgather(&rank_prec_comm,1,MPIU_INT,old_ranks,1,MPIU_INT,test_coarse_comm);CHKERRQ(ierr); 2224 for (i=0; i<size_prec_comm; i++) new_ranks[i] = -1; 2225 for (i=0; i<n_subdomains; i++) new_ranks[old_ranks[i]] = i; 2226 ierr = PetscViewerASCIIOpen(test_coarse_comm,"test_mat_part.out",&viewer_test);CHKERRQ(ierr); 2227 k = pcis->n_neigh-1; 2228 ierr = PetscMalloc(2*sizeof(PetscInt),&ii); 2229 ii[0]=0; 2230 ii[1]=k; 2231 ierr = PetscMalloc(k*sizeof(PetscInt),&jj); 2232 for (i=0; i<k; i++) jj[i]=new_ranks[pcis->neigh[i+1]]; 2233 ierr = PetscSortInt(k,jj);CHKERRQ(ierr); 2234 ierr = MatCreateMPIAdj(test_coarse_comm,1,n_subdomains,ii,jj,NULL,&mat_adj);CHKERRQ(ierr); 2235 ierr = MatView(mat_adj,viewer_test);CHKERRQ(ierr); 2236 ierr = MatPartitioningCreate(test_coarse_comm,&mat_part);CHKERRQ(ierr); 2237 ierr = MatPartitioningSetAdjacency(mat_part,mat_adj);CHKERRQ(ierr); 2238 ierr = MatPartitioningSetFromOptions(mat_part);CHKERRQ(ierr); 2239 printf("Setting Nparts %d\n",n_parts); 2240 ierr = MatPartitioningSetNParts(mat_part,n_parts);CHKERRQ(ierr); 2241 ierr = MatPartitioningView(mat_part,viewer_test);CHKERRQ(ierr); 2242 ierr = 
MatPartitioningApply(mat_part,&coarse_new_decomposition);CHKERRQ(ierr); 2243 ierr = ISView(coarse_new_decomposition,viewer_test);CHKERRQ(ierr); 2244 ierr = ISPartitioningToNumbering(coarse_new_decomposition,&is_numbering);CHKERRQ(ierr); 2245 ierr = ISView(is_numbering,viewer_test);CHKERRQ(ierr); 2246 ierr = PetscViewerDestroy(&viewer_test);CHKERRQ(ierr); 2247 ierr = ISDestroy(&coarse_new_decomposition);CHKERRQ(ierr); 2248 ierr = ISDestroy(&is_numbering);CHKERRQ(ierr); 2249 ierr = MatPartitioningDestroy(&mat_part);CHKERRQ(ierr); 2250 ierr = PetscFree(old_ranks);CHKERRQ(ierr); 2251 ierr = PetscFree(new_ranks);CHKERRQ(ierr); 2252 ierr = MPI_Comm_free(&test_coarse_comm);CHKERRQ(ierr); 2253 } 2254 #endif 2255 2256 /* build CSR graph of subdomains' connectivity */ 2257 ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&array_int);CHKERRQ(ierr); 2258 ierr = PetscMemzero(array_int,pcis->n*sizeof(PetscInt));CHKERRQ(ierr); 2259 for (i=1;i<pcis->n_neigh;i++){/* i=1 so I don't count myself -> faces nodes counts to 1 */ 2260 for (j=0;j<pcis->n_shared[i];j++){ 2261 array_int[ pcis->shared[i][j] ]+=1; 2262 } 2263 } 2264 for (i=1;i<pcis->n_neigh;i++){ 2265 for (j=0;j<pcis->n_shared[i];j++){ 2266 if (array_int[ pcis->shared[i][j] ] > 0 ){ 2267 my_faces++; 2268 break; 2269 } 2270 } 2271 } 2272 2273 ierr = MPI_Reduce(&my_faces,&total_faces,1,MPIU_INT,MPI_SUM,master_proc,prec_comm);CHKERRQ(ierr); 2274 ierr = PetscMalloc (my_faces*sizeof(PetscInt),&my_faces_connectivity);CHKERRQ(ierr); 2275 my_faces=0; 2276 for (i=1;i<pcis->n_neigh;i++){ 2277 for (j=0;j<pcis->n_shared[i];j++){ 2278 if (array_int[ pcis->shared[i][j] ] > 0 ){ 2279 my_faces_connectivity[my_faces]=pcis->neigh[i]; 2280 my_faces++; 2281 break; 2282 } 2283 } 2284 } 2285 if (rank_prec_comm == master_proc) { 2286 ierr = PetscMalloc (total_faces*sizeof(PetscMPIInt),&petsc_faces_adjncy);CHKERRQ(ierr); 2287 ierr = PetscMalloc (size_prec_comm*sizeof(PetscMPIInt),&number_of_faces);CHKERRQ(ierr); 2288 ierr = PetscMalloc 
(total_faces*sizeof(MetisInt),&faces_adjncy);CHKERRQ(ierr); 2289 ierr = PetscMalloc ((n_subdomains+1)*sizeof(MetisInt),&faces_xadj);CHKERRQ(ierr); 2290 ierr = PetscMalloc ((size_prec_comm+1)*sizeof(PetscMPIInt),&faces_displacements);CHKERRQ(ierr); 2291 } 2292 ierr = MPI_Gather(&my_faces,1,MPIU_INT,&number_of_faces[0],1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr); 2293 if (rank_prec_comm == master_proc) { 2294 faces_xadj[0]=0; 2295 faces_displacements[0]=0; 2296 j=0; 2297 for (i=1;i<size_prec_comm+1;i++) { 2298 faces_displacements[i]=faces_displacements[i-1]+number_of_faces[i-1]; 2299 if (number_of_faces[i-1]) { 2300 j++; 2301 faces_xadj[j]=faces_xadj[j-1]+number_of_faces[i-1]; 2302 } 2303 } 2304 } 2305 ierr = MPI_Gatherv(&my_faces_connectivity[0],my_faces,MPIU_INT,&petsc_faces_adjncy[0],number_of_faces,faces_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr); 2306 ierr = PetscFree(my_faces_connectivity);CHKERRQ(ierr); 2307 ierr = PetscFree(array_int);CHKERRQ(ierr); 2308 if (rank_prec_comm == master_proc) { 2309 for (i=0;i<total_faces;i++) faces_adjncy[i]=(MetisInt)(petsc_faces_adjncy[i]/ranks_stretching_ratio); /* cast to MetisInt */ 2310 ierr = PetscFree(faces_displacements);CHKERRQ(ierr); 2311 ierr = PetscFree(number_of_faces);CHKERRQ(ierr); 2312 ierr = PetscFree(petsc_faces_adjncy);CHKERRQ(ierr); 2313 } 2314 2315 if ( rank_prec_comm == master_proc ) { 2316 2317 PetscInt heuristic_for_metis=3; 2318 2319 ncon=1; 2320 faces_nvtxs=n_subdomains; 2321 /* partition graoh induced by face connectivity */ 2322 ierr = PetscMalloc (n_subdomains*sizeof(MetisInt),&metis_coarse_subdivision);CHKERRQ(ierr); 2323 ierr = METIS_SetDefaultOptions(options); 2324 /* we need a contiguous partition of the coarse mesh */ 2325 options[METIS_OPTION_CONTIG]=1; 2326 options[METIS_OPTION_NITER]=30; 2327 if (pcbddc->coarsening_ratio > 1) { 2328 if (n_subdomains>n_parts*heuristic_for_metis) { 2329 options[METIS_OPTION_IPTYPE]=METIS_IPTYPE_EDGE; 2330 
options[METIS_OPTION_OBJTYPE]=METIS_OBJTYPE_CUT; 2331 ierr = METIS_PartGraphKway(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision); 2332 if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphKway (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr); 2333 } else { 2334 ierr = METIS_PartGraphRecursive(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision); 2335 if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphRecursive (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr); 2336 } 2337 } else { 2338 for (i=0;i<n_subdomains;i++) metis_coarse_subdivision[i]=i; 2339 } 2340 ierr = PetscFree(faces_xadj);CHKERRQ(ierr); 2341 ierr = PetscFree(faces_adjncy);CHKERRQ(ierr); 2342 ierr = PetscMalloc(size_prec_comm*sizeof(PetscMPIInt),&coarse_subdivision);CHKERRQ(ierr); 2343 2344 /* copy/cast values avoiding possible type conflicts between PETSc, MPI and METIS */ 2345 for (i=0;i<size_prec_comm;i++) coarse_subdivision[i]=MPI_PROC_NULL; 2346 for (i=0;i<n_subdomains;i++) coarse_subdivision[ranks_stretching_ratio*i]=(PetscInt)(metis_coarse_subdivision[i]); 2347 ierr = PetscFree(metis_coarse_subdivision);CHKERRQ(ierr); 2348 } 2349 2350 /* Create new communicator for coarse problem splitting the old one */ 2351 if ( !(rank_prec_comm%procs_jumps_coarse_comm) && rank_prec_comm < procs_jumps_coarse_comm*n_parts ){ 2352 coarse_color=0; /* for communicator splitting */ 2353 active_rank=rank_prec_comm; /* for insertion of matrix values */ 2354 } 2355 /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards) 2356 key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */ 2357 ierr = MPI_Comm_split(prec_comm,coarse_color,rank_prec_comm,&coarse_comm);CHKERRQ(ierr); 2358 2359 if ( 
coarse_color == 0 ) { 2360 ierr = MPI_Comm_size(coarse_comm,&size_coarse_comm);CHKERRQ(ierr); 2361 ierr = MPI_Comm_rank(coarse_comm,&rank_coarse_comm);CHKERRQ(ierr); 2362 } else { 2363 rank_coarse_comm = MPI_PROC_NULL; 2364 } 2365 2366 /* master proc take care of arranging and distributing coarse information */ 2367 if (rank_coarse_comm == master_proc) { 2368 ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&displacements_recv);CHKERRQ(ierr); 2369 ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&total_count_recv);CHKERRQ(ierr); 2370 ierr = PetscMalloc (n_subdomains*sizeof(PetscMPIInt),&total_ranks_recv);CHKERRQ(ierr); 2371 /* some initializations */ 2372 displacements_recv[0]=0; 2373 ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr); 2374 /* count from how many processes the j-th process of the coarse decomposition will receive data */ 2375 for (j=0;j<size_coarse_comm;j++) { 2376 for (i=0;i<size_prec_comm;i++) { 2377 if (coarse_subdivision[i]==j) total_count_recv[j]++; 2378 } 2379 } 2380 /* displacements needed for scatterv of total_ranks_recv */ 2381 for (i=1; i<size_coarse_comm; i++) displacements_recv[i]=displacements_recv[i-1]+total_count_recv[i-1]; 2382 2383 /* Now fill properly total_ranks_recv -> each coarse process will receive the ranks (in prec_comm communicator) of its friend (sending) processes */ 2384 ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr); 2385 for (j=0;j<size_coarse_comm;j++) { 2386 for (i=0;i<size_prec_comm;i++) { 2387 if (coarse_subdivision[i]==j) { 2388 total_ranks_recv[displacements_recv[j]+total_count_recv[j]]=i; 2389 total_count_recv[j]+=1; 2390 } 2391 } 2392 } 2393 /*for (j=0;j<size_coarse_comm;j++) { 2394 printf("process %d in new rank will receive from %d processes (original ranks follows)\n",j,total_count_recv[j]); 2395 for (i=0;i<total_count_recv[j];i++) { 2396 printf("%d ",total_ranks_recv[displacements_recv[j]+i]); 2397 } 2398 
printf("\n"); 2399 }*/ 2400 2401 /* identify new decomposition in terms of ranks in the old communicator */ 2402 for (i=0;i<n_subdomains;i++) { 2403 coarse_subdivision[ranks_stretching_ratio*i]=coarse_subdivision[ranks_stretching_ratio*i]*procs_jumps_coarse_comm; 2404 } 2405 /*printf("coarse_subdivision in old end new ranks\n"); 2406 for (i=0;i<size_prec_comm;i++) 2407 if (coarse_subdivision[i]!=MPI_PROC_NULL) { 2408 printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]/procs_jumps_coarse_comm); 2409 } else { 2410 printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]); 2411 } 2412 printf("\n");*/ 2413 } 2414 2415 /* Scatter new decomposition for send details */ 2416 ierr = MPI_Scatter(&coarse_subdivision[0],1,MPIU_INT,&rank_coarse_proc_send_to,1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr); 2417 /* Scatter receiving details to members of coarse decomposition */ 2418 if ( coarse_color == 0) { 2419 ierr = MPI_Scatter(&total_count_recv[0],1,MPIU_INT,&count_recv,1,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr); 2420 ierr = PetscMalloc (count_recv*sizeof(PetscMPIInt),&ranks_recv);CHKERRQ(ierr); 2421 ierr = MPI_Scatterv(&total_ranks_recv[0],total_count_recv,displacements_recv,MPIU_INT,&ranks_recv[0],count_recv,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr); 2422 } 2423 2424 /*printf("I will send my matrix data to proc %d\n",rank_coarse_proc_send_to); 2425 if (coarse_color == 0) { 2426 printf("I will receive some matrix data from %d processes (ranks follows)\n",count_recv); 2427 for (i=0;i<count_recv;i++) 2428 printf("%d ",ranks_recv[i]); 2429 printf("\n"); 2430 }*/ 2431 2432 if (rank_prec_comm == master_proc) { 2433 ierr = PetscFree(coarse_subdivision);CHKERRQ(ierr); 2434 ierr = PetscFree(total_count_recv);CHKERRQ(ierr); 2435 ierr = PetscFree(total_ranks_recv);CHKERRQ(ierr); 2436 ierr = PetscFree(displacements_recv);CHKERRQ(ierr); 2437 } 2438 #endif 2439 break; 2440 } 2441 2442 case(REPLICATED_BDDC): 2443 2444 
pcbddc->coarse_communications_type = GATHERS_BDDC; 2445 coarse_mat_type = MATSEQAIJ; 2446 coarse_pc_type = PCLU; 2447 coarse_ksp_type = KSPPREONLY; 2448 coarse_comm = PETSC_COMM_SELF; 2449 active_rank = rank_prec_comm; 2450 break; 2451 2452 case(PARALLEL_BDDC): 2453 2454 pcbddc->coarse_communications_type = SCATTERS_BDDC; 2455 coarse_mat_type = MATAIJ; 2456 coarse_pc_type = PCREDUNDANT; 2457 coarse_ksp_type = KSPPREONLY; 2458 coarse_comm = prec_comm; 2459 active_rank = rank_prec_comm; 2460 break; 2461 2462 case(SEQUENTIAL_BDDC): 2463 pcbddc->coarse_communications_type = GATHERS_BDDC; 2464 coarse_mat_type = MATAIJ; 2465 coarse_pc_type = PCLU; 2466 coarse_ksp_type = KSPPREONLY; 2467 coarse_comm = PETSC_COMM_SELF; 2468 active_rank = master_proc; 2469 break; 2470 } 2471 2472 switch(pcbddc->coarse_communications_type){ 2473 2474 case(SCATTERS_BDDC): 2475 { 2476 if (pcbddc->coarse_problem_type==MULTILEVEL_BDDC) { 2477 2478 IS coarse_IS; 2479 2480 if(pcbddc->coarsening_ratio == 1) { 2481 ins_local_primal_size = pcbddc->local_primal_size; 2482 ins_local_primal_indices = pcbddc->local_primal_indices; 2483 if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); } 2484 /* nonzeros */ 2485 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr); 2486 ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr); 2487 for (i=0;i<ins_local_primal_size;i++) { 2488 dnz[i] = ins_local_primal_size; 2489 } 2490 } else { 2491 PetscMPIInt send_size; 2492 PetscMPIInt *send_buffer; 2493 PetscInt *aux_ins_indices; 2494 PetscInt ii,jj; 2495 MPI_Request *requests; 2496 2497 ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr); 2498 /* reusing pcbddc->local_primal_displacements and pcbddc->replicated_primal_size */ 2499 ierr = PetscFree(pcbddc->local_primal_displacements);CHKERRQ(ierr); 2500 ierr = PetscMalloc((count_recv+1)*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr); 2501 
pcbddc->replicated_primal_size = count_recv; 2502 j = 0; 2503 for (i=0;i<count_recv;i++) { 2504 pcbddc->local_primal_displacements[i] = j; 2505 j += pcbddc->local_primal_sizes[ranks_recv[i]]; 2506 } 2507 pcbddc->local_primal_displacements[count_recv] = j; 2508 ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr); 2509 /* allocate auxiliary space */ 2510 ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr); 2511 ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&aux_ins_indices);CHKERRQ(ierr); 2512 ierr = PetscMemzero(aux_ins_indices,pcbddc->coarse_size*sizeof(PetscInt));CHKERRQ(ierr); 2513 /* allocate stuffs for message massing */ 2514 ierr = PetscMalloc((count_recv+1)*sizeof(MPI_Request),&requests);CHKERRQ(ierr); 2515 for (i=0;i<count_recv+1;i++) { requests[i]=MPI_REQUEST_NULL; } 2516 /* send indices to be inserted */ 2517 for (i=0;i<count_recv;i++) { 2518 send_size = pcbddc->local_primal_sizes[ranks_recv[i]]; 2519 ierr = MPI_Irecv(&pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]],send_size,MPIU_INT,ranks_recv[i],999,prec_comm,&requests[i]);CHKERRQ(ierr); 2520 } 2521 if (rank_coarse_proc_send_to != MPI_PROC_NULL ) { 2522 send_size = pcbddc->local_primal_size; 2523 ierr = PetscMalloc(send_size*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr); 2524 for (i=0;i<send_size;i++) { 2525 send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i]; 2526 } 2527 ierr = MPI_Isend(send_buffer,send_size,MPIU_INT,rank_coarse_proc_send_to,999,prec_comm,&requests[count_recv]);CHKERRQ(ierr); 2528 } 2529 ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr); 2530 if (rank_coarse_proc_send_to != MPI_PROC_NULL ) { 2531 ierr = PetscFree(send_buffer);CHKERRQ(ierr); 2532 } 2533 j = 0; 2534 for (i=0;i<count_recv;i++) { 2535 ii = pcbddc->local_primal_displacements[i+1]-pcbddc->local_primal_displacements[i]; 2536 localsizes2[i] = ii*ii; 2537 localdispl2[i] = j; 2538 j 
+= localsizes2[i]; 2539 jj = pcbddc->local_primal_displacements[i]; 2540 /* it counts the coarse subdomains sharing the coarse node */ 2541 for (k=0;k<ii;k++) { 2542 aux_ins_indices[pcbddc->replicated_local_primal_indices[jj+k]] += 1; 2543 } 2544 } 2545 /* temp_coarse_mat_vals used to store matrix values to be received */ 2546 ierr = PetscMalloc(j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr); 2547 /* evaluate how many values I will insert in coarse mat */ 2548 ins_local_primal_size = 0; 2549 for (i=0;i<pcbddc->coarse_size;i++) { 2550 if (aux_ins_indices[i]) { 2551 ins_local_primal_size++; 2552 } 2553 } 2554 /* evaluate indices I will insert in coarse mat */ 2555 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr); 2556 j = 0; 2557 for(i=0;i<pcbddc->coarse_size;i++) { 2558 if(aux_ins_indices[i]) { 2559 ins_local_primal_indices[j] = i; 2560 j++; 2561 } 2562 } 2563 /* processes partecipating in coarse problem receive matrix data from their friends */ 2564 for (i=0;i<count_recv;i++) { 2565 ierr = MPI_Irecv(&temp_coarse_mat_vals[localdispl2[i]],localsizes2[i],MPIU_SCALAR,ranks_recv[i],666,prec_comm,&requests[i]);CHKERRQ(ierr); 2566 } 2567 if (rank_coarse_proc_send_to != MPI_PROC_NULL ) { 2568 send_size = pcbddc->local_primal_size*pcbddc->local_primal_size; 2569 ierr = MPI_Isend(&coarse_submat_vals[0],send_size,MPIU_SCALAR,rank_coarse_proc_send_to,666,prec_comm,&requests[count_recv]);CHKERRQ(ierr); 2570 } 2571 ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr); 2572 /* nonzeros */ 2573 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr); 2574 ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr); 2575 /* use aux_ins_indices to realize a global to local mapping */ 2576 j=0; 2577 for(i=0;i<pcbddc->coarse_size;i++){ 2578 if(aux_ins_indices[i]==0){ 2579 aux_ins_indices[i]=-1; 2580 } else { 2581 aux_ins_indices[i]=j; 2582 j++; 2583 } 2584 } 
2585 for (i=0;i<count_recv;i++) { 2586 j = pcbddc->local_primal_sizes[ranks_recv[i]]; 2587 for (k=0;k<j;k++) { 2588 dnz[aux_ins_indices[pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]+k]]] += j; 2589 } 2590 } 2591 /* check */ 2592 for (i=0;i<ins_local_primal_size;i++) { 2593 if (dnz[i] > ins_local_primal_size) { 2594 dnz[i] = ins_local_primal_size; 2595 } 2596 } 2597 ierr = PetscFree(requests);CHKERRQ(ierr); 2598 ierr = PetscFree(aux_ins_indices);CHKERRQ(ierr); 2599 if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); } 2600 } 2601 /* create local to global mapping needed by coarse MATIS */ 2602 if (coarse_comm != MPI_COMM_NULL ) {ierr = MPI_Comm_free(&coarse_comm);CHKERRQ(ierr);} 2603 coarse_comm = prec_comm; 2604 active_rank = rank_prec_comm; 2605 ierr = ISCreateGeneral(coarse_comm,ins_local_primal_size,ins_local_primal_indices,PETSC_COPY_VALUES,&coarse_IS);CHKERRQ(ierr); 2606 ierr = ISLocalToGlobalMappingCreateIS(coarse_IS,&coarse_ISLG);CHKERRQ(ierr); 2607 ierr = ISDestroy(&coarse_IS);CHKERRQ(ierr); 2608 } else if (pcbddc->coarse_problem_type==PARALLEL_BDDC) { 2609 /* arrays for values insertion */ 2610 ins_local_primal_size = pcbddc->local_primal_size; 2611 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr); 2612 ierr = PetscMalloc(ins_local_primal_size*ins_local_primal_size*sizeof(PetscScalar),&ins_coarse_mat_vals);CHKERRQ(ierr); 2613 for (j=0;j<ins_local_primal_size;j++){ 2614 ins_local_primal_indices[j]=pcbddc->local_primal_indices[j]; 2615 for (i=0;i<ins_local_primal_size;i++) { 2616 ins_coarse_mat_vals[j*ins_local_primal_size+i]=coarse_submat_vals[j*ins_local_primal_size+i]; 2617 } 2618 } 2619 } 2620 break; 2621 2622 } 2623 2624 case(GATHERS_BDDC): 2625 { 2626 2627 PetscMPIInt mysize,mysize2; 2628 PetscMPIInt *send_buffer; 2629 2630 if (rank_prec_comm==active_rank) { 2631 ierr = PetscMalloc ( 
pcbddc->replicated_primal_size*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr); 2632 ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscScalar),&pcbddc->replicated_local_primal_values);CHKERRQ(ierr); 2633 ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr); 2634 ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr); 2635 /* arrays for values insertion */ 2636 for (i=0;i<size_prec_comm;i++) localsizes2[i]=pcbddc->local_primal_sizes[i]*pcbddc->local_primal_sizes[i]; 2637 localdispl2[0]=0; 2638 for (i=1;i<size_prec_comm;i++) localdispl2[i]=localsizes2[i-1]+localdispl2[i-1]; 2639 j=0; 2640 for (i=0;i<size_prec_comm;i++) j+=localsizes2[i]; 2641 ierr = PetscMalloc ( j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr); 2642 } 2643 2644 mysize=pcbddc->local_primal_size; 2645 mysize2=pcbddc->local_primal_size*pcbddc->local_primal_size; 2646 ierr = PetscMalloc(mysize*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr); 2647 for (i=0; i<mysize; i++) send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i]; 2648 2649 if (pcbddc->coarse_problem_type == SEQUENTIAL_BDDC){ 2650 ierr = MPI_Gatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr); 2651 ierr = MPI_Gatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,master_proc,prec_comm);CHKERRQ(ierr); 2652 } else { 2653 ierr = MPI_Allgatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,prec_comm);CHKERRQ(ierr); 2654 ierr = MPI_Allgatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,prec_comm);CHKERRQ(ierr); 2655 } 2656 ierr = PetscFree(send_buffer);CHKERRQ(ierr); 2657 break; 2658 }/* 
switch on coarse problem and communications associated with finished */ 2659 } 2660 2661 /* Now create and fill up coarse matrix */ 2662 if ( rank_prec_comm == active_rank ) { 2663 2664 Mat matis_coarse_local_mat; 2665 2666 if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) { 2667 ierr = MatCreate(coarse_comm,&pcbddc->coarse_mat);CHKERRQ(ierr); 2668 ierr = MatSetSizes(pcbddc->coarse_mat,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size);CHKERRQ(ierr); 2669 ierr = MatSetType(pcbddc->coarse_mat,coarse_mat_type);CHKERRQ(ierr); 2670 ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr); 2671 ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr); 2672 ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr); 2673 ierr = MatSetOption(pcbddc->coarse_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */ 2674 ierr = MatSetOption(pcbddc->coarse_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr); 2675 } else { 2676 ierr = MatCreateIS(coarse_comm,1,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size,coarse_ISLG,&pcbddc->coarse_mat);CHKERRQ(ierr); 2677 ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr); 2678 ierr = MatISGetLocalMat(pcbddc->coarse_mat,&matis_coarse_local_mat);CHKERRQ(ierr); 2679 ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr); 2680 ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr); 2681 ierr = MatSetUp(matis_coarse_local_mat);CHKERRQ(ierr); 2682 ierr = MatSetOption(matis_coarse_local_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */ 2683 ierr = MatSetOption(matis_coarse_local_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr); 2684 } 2685 /* preallocation */ 2686 if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) { 2687 2688 PetscInt lrows,lcols,bs; 2689 2690 ierr = MatGetLocalSize(pcbddc->coarse_mat,&lrows,&lcols);CHKERRQ(ierr); 2691 ierr = 
MatPreallocateInitialize(coarse_comm,lrows,lcols,dnz,onz);CHKERRQ(ierr); 2692 ierr = MatGetBlockSize(pcbddc->coarse_mat,&bs);CHKERRQ(ierr); 2693 2694 if (pcbddc->coarse_problem_type == PARALLEL_BDDC) { 2695 2696 Vec vec_dnz,vec_onz; 2697 PetscScalar *my_dnz,*my_onz,*array; 2698 PetscInt *mat_ranges,*row_ownership; 2699 PetscInt coarse_index_row,coarse_index_col,owner; 2700 2701 ierr = VecCreate(prec_comm,&vec_dnz);CHKERRQ(ierr); 2702 ierr = VecSetBlockSize(vec_dnz,bs);CHKERRQ(ierr); 2703 ierr = VecSetSizes(vec_dnz,PETSC_DECIDE,pcbddc->coarse_size);CHKERRQ(ierr); 2704 ierr = VecSetType(vec_dnz,VECMPI);CHKERRQ(ierr); 2705 ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr); 2706 2707 ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_dnz);CHKERRQ(ierr); 2708 ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_onz);CHKERRQ(ierr); 2709 ierr = PetscMemzero(my_dnz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr); 2710 ierr = PetscMemzero(my_onz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr); 2711 2712 ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&row_ownership);CHKERRQ(ierr); 2713 ierr = MatGetOwnershipRanges(pcbddc->coarse_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 2714 for (i=0;i<size_prec_comm;i++) { 2715 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 2716 row_ownership[j]=i; 2717 } 2718 } 2719 2720 for (i=0;i<pcbddc->local_primal_size;i++) { 2721 coarse_index_row = pcbddc->local_primal_indices[i]; 2722 owner = row_ownership[coarse_index_row]; 2723 for (j=i;j<pcbddc->local_primal_size;j++) { 2724 owner = row_ownership[coarse_index_row]; 2725 coarse_index_col = pcbddc->local_primal_indices[j]; 2726 if (coarse_index_col > mat_ranges[owner]-1 && coarse_index_col < mat_ranges[owner+1] ) { 2727 my_dnz[i] += 1.0; 2728 } else { 2729 my_onz[i] += 1.0; 2730 } 2731 if (i != j) { 2732 owner = row_ownership[coarse_index_col]; 2733 if (coarse_index_row > mat_ranges[owner]-1 && coarse_index_row < 
mat_ranges[owner+1] ) { 2734 my_dnz[j] += 1.0; 2735 } else { 2736 my_onz[j] += 1.0; 2737 } 2738 } 2739 } 2740 } 2741 ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr); 2742 ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr); 2743 if (pcbddc->local_primal_size) { 2744 ierr = VecSetValues(vec_dnz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr); 2745 ierr = VecSetValues(vec_onz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_onz,ADD_VALUES);CHKERRQ(ierr); 2746 } 2747 ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr); 2748 ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr); 2749 ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr); 2750 ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr); 2751 j = mat_ranges[rank_prec_comm+1]-mat_ranges[rank_prec_comm]; 2752 ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr); 2753 for (i=0; i<j; i++) dnz[i] = (PetscInt)array[i]; 2754 2755 ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr); 2756 ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr); 2757 for (i=0;i<j;i++) onz[i] = (PetscInt)array[i]; 2758 2759 ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr); 2760 ierr = PetscFree(my_dnz);CHKERRQ(ierr); 2761 ierr = PetscFree(my_onz);CHKERRQ(ierr); 2762 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 2763 ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr); 2764 ierr = VecDestroy(&vec_onz);CHKERRQ(ierr); 2765 } else { 2766 for (k=0;k<size_prec_comm;k++){ 2767 offset=pcbddc->local_primal_displacements[k]; 2768 offset2=localdispl2[k]; 2769 ins_local_primal_size = pcbddc->local_primal_sizes[k]; 2770 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr); 2771 for (j=0;j<ins_local_primal_size;j++){ 2772 ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j]; 2773 } 2774 for (j=0;j<ins_local_primal_size;j++) { 2775 ierr = MatPreallocateSet(ins_local_primal_indices[j],ins_local_primal_size,ins_local_primal_indices,dnz,onz);CHKERRQ(ierr); 2776 } 2777 ierr = 
PetscFree(ins_local_primal_indices);CHKERRQ(ierr); 2778 } 2779 } 2780 2781 /* check */ 2782 for (i=0;i<lrows;i++) { 2783 if (dnz[i]>lcols) dnz[i]=lcols; 2784 if (onz[i]>pcbddc->coarse_size-lcols) onz[i]=pcbddc->coarse_size-lcols; 2785 } 2786 ierr = MatSeqAIJSetPreallocation(pcbddc->coarse_mat,0,dnz);CHKERRQ(ierr); 2787 ierr = MatMPIAIJSetPreallocation(pcbddc->coarse_mat,0,dnz,0,onz);CHKERRQ(ierr); 2788 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 2789 } else { 2790 ierr = MatSeqAIJSetPreallocation(matis_coarse_local_mat,0,dnz);CHKERRQ(ierr); 2791 ierr = PetscFree(dnz);CHKERRQ(ierr); 2792 } 2793 /* insert values */ 2794 if (pcbddc->coarse_problem_type == PARALLEL_BDDC) { 2795 ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr); 2796 } else if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2797 if (pcbddc->coarsening_ratio == 1) { 2798 ins_coarse_mat_vals = coarse_submat_vals; 2799 ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,INSERT_VALUES);CHKERRQ(ierr); 2800 } else { 2801 ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); 2802 for (k=0;k<pcbddc->replicated_primal_size;k++) { 2803 offset = pcbddc->local_primal_displacements[k]; 2804 offset2 = localdispl2[k]; 2805 ins_local_primal_size = pcbddc->local_primal_displacements[k+1]-pcbddc->local_primal_displacements[k]; 2806 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr); 2807 for (j=0;j<ins_local_primal_size;j++){ 2808 ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j]; 2809 } 2810 ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2]; 2811 ierr = 
MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr); 2812 ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); 2813 } 2814 } 2815 ins_local_primal_indices = 0; 2816 ins_coarse_mat_vals = 0; 2817 } else { 2818 for (k=0;k<size_prec_comm;k++){ 2819 offset=pcbddc->local_primal_displacements[k]; 2820 offset2=localdispl2[k]; 2821 ins_local_primal_size = pcbddc->local_primal_sizes[k]; 2822 ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr); 2823 for (j=0;j<ins_local_primal_size;j++){ 2824 ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j]; 2825 } 2826 ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2]; 2827 ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr); 2828 ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); 2829 } 2830 ins_local_primal_indices = 0; 2831 ins_coarse_mat_vals = 0; 2832 } 2833 ierr = MatAssemblyBegin(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2834 ierr = MatAssemblyEnd(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2835 /* symmetry of coarse matrix */ 2836 if (issym) { 2837 ierr = MatSetOption(pcbddc->coarse_mat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr); 2838 } 2839 ierr = MatGetVecs(pcbddc->coarse_mat,&pcbddc->coarse_vec,&pcbddc->coarse_rhs);CHKERRQ(ierr); 2840 } 2841 2842 /* create loc to glob scatters if needed */ 2843 if (pcbddc->coarse_communications_type == SCATTERS_BDDC) { 2844 IS local_IS,global_IS; 2845 ierr = ISCreateStride(PETSC_COMM_SELF,pcbddc->local_primal_size,0,1,&local_IS);CHKERRQ(ierr); 2846 ierr = ISCreateGeneral(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_indices,PETSC_COPY_VALUES,&global_IS);CHKERRQ(ierr); 2847 ierr = 
VecScatterCreate(pcbddc->vec1_P,local_IS,pcbddc->coarse_vec,global_IS,&pcbddc->coarse_loc_to_glob);CHKERRQ(ierr); 2848 ierr = ISDestroy(&local_IS);CHKERRQ(ierr); 2849 ierr = ISDestroy(&global_IS);CHKERRQ(ierr); 2850 } 2851 2852 /* free memory no longer needed */ 2853 if (coarse_ISLG) { ierr = ISLocalToGlobalMappingDestroy(&coarse_ISLG);CHKERRQ(ierr); } 2854 if (ins_local_primal_indices) { ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); } 2855 if (ins_coarse_mat_vals) { ierr = PetscFree(ins_coarse_mat_vals);CHKERRQ(ierr); } 2856 if (localsizes2) { ierr = PetscFree(localsizes2);CHKERRQ(ierr); } 2857 if (localdispl2) { ierr = PetscFree(localdispl2);CHKERRQ(ierr); } 2858 if (temp_coarse_mat_vals) { ierr = PetscFree(temp_coarse_mat_vals);CHKERRQ(ierr); } 2859 2860 /* Compute coarse null space */ 2861 CoarseNullSpace = 0; 2862 if (pcbddc->NullSpace) { 2863 ierr = PCBDDCNullSpaceAssembleCoarse(pc,&CoarseNullSpace);CHKERRQ(ierr); 2864 } 2865 2866 /* KSP for coarse problem */ 2867 if (rank_prec_comm == active_rank) { 2868 PetscBool isbddc=PETSC_FALSE; 2869 2870 ierr = KSPCreate(coarse_comm,&pcbddc->coarse_ksp);CHKERRQ(ierr); 2871 ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->coarse_ksp,(PetscObject)pc,1);CHKERRQ(ierr); 2872 ierr = KSPSetOperators(pcbddc->coarse_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr); 2873 ierr = KSPSetTolerances(pcbddc->coarse_ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,max_it_coarse_ksp);CHKERRQ(ierr); 2874 ierr = KSPSetType(pcbddc->coarse_ksp,coarse_ksp_type);CHKERRQ(ierr); 2875 ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr); 2876 ierr = PCSetType(pc_temp,coarse_pc_type);CHKERRQ(ierr); 2877 /* Allow user's customization */ 2878 ierr = KSPSetOptionsPrefix(pcbddc->coarse_ksp,"coarse_");CHKERRQ(ierr); 2879 /* Set Up PC for coarse problem BDDC */ 2880 if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2881 i = pcbddc->current_level+1; 2882 ierr = 
PCBDDCSetLevel(pc_temp,i);CHKERRQ(ierr); 2883 ierr = PCBDDCSetCoarseningRatio(pc_temp,pcbddc->coarsening_ratio);CHKERRQ(ierr); 2884 ierr = PCBDDCSetMaxLevels(pc_temp,pcbddc->max_levels);CHKERRQ(ierr); 2885 ierr = PCBDDCSetCoarseProblemType(pc_temp,MULTILEVEL_BDDC);CHKERRQ(ierr); 2886 if (CoarseNullSpace) { 2887 ierr = PCBDDCSetNullSpace(pc_temp,CoarseNullSpace);CHKERRQ(ierr); 2888 } 2889 if (dbg_flag) { 2890 ierr = PetscViewerASCIIPrintf(viewer,"----------------Level %d: Setting up level %d---------------\n",pcbddc->current_level,i);CHKERRQ(ierr); 2891 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2892 } 2893 } else { 2894 if (CoarseNullSpace) { 2895 ierr = KSPSetNullSpace(pcbddc->coarse_ksp,CoarseNullSpace);CHKERRQ(ierr); 2896 } 2897 } 2898 ierr = KSPSetFromOptions(pcbddc->coarse_ksp);CHKERRQ(ierr); 2899 ierr = KSPSetUp(pcbddc->coarse_ksp);CHKERRQ(ierr); 2900 2901 ierr = KSPGetTolerances(pcbddc->coarse_ksp,NULL,NULL,NULL,&j);CHKERRQ(ierr); 2902 ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr); 2903 ierr = PetscObjectTypeCompare((PetscObject)pc_temp,PCBDDC,&isbddc);CHKERRQ(ierr); 2904 if (j == 1) { 2905 ierr = KSPSetNormType(pcbddc->coarse_ksp,KSP_NORM_NONE);CHKERRQ(ierr); 2906 if (isbddc) { 2907 ierr = PCBDDCSetUseExactDirichlet(pc_temp,PETSC_FALSE);CHKERRQ(ierr); 2908 } 2909 } 2910 } 2911 /* Check coarse problem if requested */ 2912 if ( dbg_flag && rank_prec_comm == active_rank ) { 2913 KSP check_ksp; 2914 PC check_pc; 2915 Vec check_vec; 2916 PetscReal abs_infty_error,infty_error,lambda_min,lambda_max; 2917 KSPType check_ksp_type; 2918 2919 /* Create ksp object suitable for extreme eigenvalues' estimation */ 2920 ierr = KSPCreate(coarse_comm,&check_ksp);CHKERRQ(ierr); 2921 ierr = KSPSetOperators(check_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr); 2922 ierr = KSPSetTolerances(check_ksp,1.e-12,1.e-12,PETSC_DEFAULT,pcbddc->coarse_size);CHKERRQ(ierr); 2923 if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2924 if 
(issym) check_ksp_type = KSPCG; 2925 else check_ksp_type = KSPGMRES; 2926 ierr = KSPSetComputeSingularValues(check_ksp,PETSC_TRUE);CHKERRQ(ierr); 2927 } else { 2928 check_ksp_type = KSPPREONLY; 2929 } 2930 ierr = KSPSetType(check_ksp,check_ksp_type);CHKERRQ(ierr); 2931 ierr = KSPGetPC(pcbddc->coarse_ksp,&check_pc);CHKERRQ(ierr); 2932 ierr = KSPSetPC(check_ksp,check_pc);CHKERRQ(ierr); 2933 ierr = KSPSetUp(check_ksp);CHKERRQ(ierr); 2934 /* create random vec */ 2935 ierr = VecDuplicate(pcbddc->coarse_vec,&check_vec);CHKERRQ(ierr); 2936 ierr = VecSetRandom(check_vec,NULL);CHKERRQ(ierr); 2937 if (CoarseNullSpace) { 2938 ierr = MatNullSpaceRemove(CoarseNullSpace,check_vec,NULL);CHKERRQ(ierr); 2939 } 2940 ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr); 2941 /* solve coarse problem */ 2942 ierr = KSPSolve(check_ksp,pcbddc->coarse_rhs,pcbddc->coarse_vec);CHKERRQ(ierr); 2943 if (CoarseNullSpace) { 2944 ierr = MatNullSpaceRemove(CoarseNullSpace,pcbddc->coarse_vec,NULL);CHKERRQ(ierr); 2945 } 2946 /* check coarse problem residual error */ 2947 ierr = VecAXPY(check_vec,-1.0,pcbddc->coarse_vec);CHKERRQ(ierr); 2948 ierr = VecNorm(check_vec,NORM_INFINITY,&infty_error);CHKERRQ(ierr); 2949 ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr); 2950 ierr = VecNorm(pcbddc->coarse_rhs,NORM_INFINITY,&abs_infty_error);CHKERRQ(ierr); 2951 ierr = VecDestroy(&check_vec);CHKERRQ(ierr); 2952 /* get eigenvalue estimation if inexact */ 2953 if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) { 2954 ierr = KSPComputeExtremeSingularValues(check_ksp,&lambda_max,&lambda_min);CHKERRQ(ierr); 2955 ierr = KSPGetIterationNumber(check_ksp,&k);CHKERRQ(ierr); 2956 ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues estimated with %d iterations of %s.\n",k,check_ksp_type);CHKERRQ(ierr); 2957 ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues: % 1.14e %1.14e\n",lambda_min,lambda_max);CHKERRQ(ierr); 2958 } 2959 ierr = 
PetscViewerASCIIPrintf(viewer,"Coarse problem exact infty_error : %1.14e\n",infty_error);CHKERRQ(ierr); 2960 ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem residual infty_error: %1.14e\n",abs_infty_error);CHKERRQ(ierr); 2961 ierr = KSPDestroy(&check_ksp);CHKERRQ(ierr); 2962 } 2963 if (dbg_flag) { 2964 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 2965 } 2966 ierr = MatNullSpaceDestroy(&CoarseNullSpace);CHKERRQ(ierr); 2967 2968 PetscFunctionReturn(0); 2969 } 2970 2971