1 2 #include <../src/ksp/pc/impls/is/pcis.h> /*I "petscpc.h" I*/ 3 4 #undef __FUNCT__ 5 #define __FUNCT__ "PCISSetUseStiffnessScaling_IS" 6 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use) 7 { 8 PC_IS *pcis = (PC_IS*)pc->data; 9 10 PetscFunctionBegin; 11 pcis->use_stiffness_scaling = use; 12 PetscFunctionReturn(0); 13 } 14 15 #undef __FUNCT__ 16 #define __FUNCT__ "PCISSetUseStiffnessScaling" 17 /*@ 18 PCISSetUseStiffnessScaling - Tells PCIS to construct partition of unity using 19 local matrices' diagonal. 20 21 Not collective 22 23 Input Parameters: 24 + pc - the preconditioning context 25 - use - whether or not pcis use matrix diagonal to build partition of unity. 26 27 Level: intermediate 28 29 Notes: 30 31 .seealso: PCBDDC 32 @*/ 33 PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use) 34 { 35 PetscErrorCode ierr; 36 37 PetscFunctionBegin; 38 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 39 ierr = PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));CHKERRQ(ierr); 40 PetscFunctionReturn(0); 41 } 42 43 #undef __FUNCT__ 44 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling_IS" 45 static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors) 46 { 47 PetscErrorCode ierr; 48 PC_IS *pcis = (PC_IS*)pc->data; 49 50 PetscFunctionBegin; 51 ierr = PetscObjectReference((PetscObject)scaling_factors);CHKERRQ(ierr); 52 ierr = VecDestroy(&pcis->D);CHKERRQ(ierr); 53 pcis->D = scaling_factors; 54 PetscFunctionReturn(0); 55 } 56 57 #undef __FUNCT__ 58 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling" 59 /*@ 60 PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS. 61 62 Not collective 63 64 Input Parameters: 65 + pc - the preconditioning context 66 - scaling_factors - scaling factors for the subdomain 67 68 Level: intermediate 69 70 Notes: 71 Intended to use with jumping coefficients cases. 
72 73 .seealso: PCBDDC 74 @*/ 75 PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors) 76 { 77 PetscErrorCode ierr; 78 79 PetscFunctionBegin; 80 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 81 ierr = PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));CHKERRQ(ierr); 82 PetscFunctionReturn(0); 83 } 84 85 #undef __FUNCT__ 86 #define __FUNCT__ "PCISSetSubdomainScalingFactor_IS" 87 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal) 88 { 89 PC_IS *pcis = (PC_IS*)pc->data; 90 91 PetscFunctionBegin; 92 pcis->scaling_factor = scal; 93 PetscFunctionReturn(0); 94 } 95 96 #undef __FUNCT__ 97 #define __FUNCT__ "PCISSetSubdomainScalingFactor" 98 /*@ 99 PCISSetSubdomainScalingFactor - Set scaling factor for PCIS. 100 101 Not collective 102 103 Input Parameters: 104 + pc - the preconditioning context 105 - scal - scaling factor for the subdomain 106 107 Level: intermediate 108 109 Notes: 110 Intended to use with jumping coefficients cases. 
111 112 .seealso: PCBDDC 113 @*/ 114 PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal) 115 { 116 PetscErrorCode ierr; 117 118 PetscFunctionBegin; 119 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 120 ierr = PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));CHKERRQ(ierr); 121 PetscFunctionReturn(0); 122 } 123 124 125 /* -------------------------------------------------------------------------- */ 126 /* 127 PCISSetUp - 128 */ 129 #undef __FUNCT__ 130 #define __FUNCT__ "PCISSetUp" 131 PetscErrorCode PCISSetUp(PC pc, PetscBool computesolvers) 132 { 133 PC_IS *pcis = (PC_IS*)(pc->data); 134 Mat_IS *matis; 135 MatReuse reuse; 136 Mat lA; 137 IS lP; 138 PetscErrorCode ierr; 139 PetscBool flg,issbaij; 140 Vec counter; 141 142 PetscFunctionBegin; 143 ierr = PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg);CHKERRQ(ierr); 144 if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS"); 145 matis = (Mat_IS*)pc->pmat->data; 146 147 /* first time creation, get info on substructuring */ 148 if (!pc->setupcalled) { 149 PetscInt n_I; 150 PetscInt *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global; 151 PetscBT bt; 152 PetscInt i,j; 153 154 /* get info on mapping */ 155 ierr = PetscObjectReference((PetscObject)pc->pmat->rmap->mapping);CHKERRQ(ierr); 156 ierr = ISLocalToGlobalMappingDestroy(&pcis->mapping);CHKERRQ(ierr); 157 pcis->mapping = pc->pmat->rmap->mapping; 158 ierr = ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n);CHKERRQ(ierr); 159 ierr = ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr); 160 161 /* Identifying interior and interface nodes, in local numbering */ 162 ierr = PetscBTCreate(pcis->n,&bt);CHKERRQ(ierr); 163 for (i=0;i<pcis->n_neigh;i++) 164 for (j=0;j<pcis->n_shared[i];j++) { 165 ierr = PetscBTSet(bt,pcis->shared[i][j]);CHKERRQ(ierr); 166 } 167 
168 /* Creating local and global index sets for interior and inteface nodes. */ 169 ierr = PetscMalloc1(pcis->n,&idx_I_local);CHKERRQ(ierr); 170 ierr = PetscMalloc1(pcis->n,&idx_B_local);CHKERRQ(ierr); 171 for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) { 172 if (!PetscBTLookup(bt,i)) { 173 idx_I_local[n_I] = i; 174 n_I++; 175 } else { 176 idx_B_local[pcis->n_B] = i; 177 pcis->n_B++; 178 } 179 } 180 181 /* Getting the global numbering */ 182 idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */ 183 idx_I_global = idx_B_local + pcis->n_B; 184 ierr = ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr); 185 ierr = ISLocalToGlobalMappingApply(pcis->mapping,n_I,idx_I_local,idx_I_global);CHKERRQ(ierr); 186 187 /* Creating the index sets */ 188 ierr = ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);CHKERRQ(ierr); 189 ierr = ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);CHKERRQ(ierr); 190 ierr = ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);CHKERRQ(ierr); 191 ierr = ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);CHKERRQ(ierr); 192 193 /* Freeing memory */ 194 ierr = PetscFree(idx_B_local);CHKERRQ(ierr); 195 ierr = PetscFree(idx_I_local);CHKERRQ(ierr); 196 ierr = PetscBTDestroy(&bt);CHKERRQ(ierr); 197 198 /* Creating work vectors and arrays */ 199 ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr); 200 ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr); 201 ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr); 202 ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr); 203 ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr); 204 ierr = VecDuplicate(pcis->vec1_D,&pcis->vec4_D);CHKERRQ(ierr); 205 ierr = 
VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr); 206 ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr); 207 ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr); 208 ierr = MatCreateVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr); 209 ierr = PetscMalloc1(pcis->n,&pcis->work_N);CHKERRQ(ierr); 210 /* scaling vector */ 211 if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */ 212 ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr); 213 ierr = VecSet(pcis->D,pcis->scaling_factor);CHKERRQ(ierr); 214 } 215 216 /* Creating the scatter contexts */ 217 ierr = VecScatterCreate(pcis->vec1_N,pcis->is_I_local,pcis->vec1_D,(IS)0,&pcis->N_to_D);CHKERRQ(ierr); 218 ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr); 219 ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr); 220 ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr); 221 222 /* map from boundary to local */ 223 ierr = ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcis->BtoNmap);CHKERRQ(ierr); 224 } 225 226 /* 227 Extracting the blocks A_II, A_BI, A_IB and A_BB from A. 
If the numbering 228 is such that interior nodes come first than the interface ones, we have 229 230 [ A_II | A_IB ] 231 A = [------+------] 232 [ A_BI | A_BB ] 233 */ 234 reuse = MAT_INITIAL_MATRIX; 235 if (pcis->reusesubmatrices && pc->setupcalled) { 236 if (pc->flag == SAME_NONZERO_PATTERN) { 237 reuse = MAT_REUSE_MATRIX; 238 } else { 239 reuse = MAT_INITIAL_MATRIX; 240 } 241 } 242 if (reuse == MAT_INITIAL_MATRIX) { 243 ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr); 244 ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr); 245 ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr); 246 ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr); 247 } 248 249 ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->A_II);CHKERRQ(ierr); 250 ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,reuse,&pcis->A_BB);CHKERRQ(ierr); 251 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 252 if (!issbaij) { 253 ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);CHKERRQ(ierr); 254 ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);CHKERRQ(ierr); 255 } else { 256 Mat newmat; 257 ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat);CHKERRQ(ierr); 258 ierr = MatGetSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);CHKERRQ(ierr); 259 ierr = MatGetSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);CHKERRQ(ierr); 260 ierr = MatDestroy(&newmat);CHKERRQ(ierr); 261 } 262 /* interface pressure block row for B_C */ 263 ierr = PetscObjectQuery((PetscObject)pc,"__KSPFETIDP_lP" ,(PetscObject*)&lP);CHKERRQ(ierr); 264 ierr = PetscObjectQuery((PetscObject)pc,"__KSPFETIDP_lA" ,(PetscObject*)&lA);CHKERRQ(ierr); 265 if (lA && lP) { 266 Mat B_BI,B_BB,Bt_BI,Bt_BB; 267 PetscBool issym; 268 ierr = MatIsSymmetric(lA,PETSC_SMALL,&issym);CHKERRQ(ierr); 269 if (issym) { /* symmetric elimination of essential dofs */ 270 ierr = 
MatGetSubMatrix(lA,lP,pcis->is_I_local,MAT_INITIAL_MATRIX,&B_BI);CHKERRQ(ierr); 271 ierr = MatGetSubMatrix(lA,lP,pcis->is_B_local,MAT_INITIAL_MATRIX,&B_BB);CHKERRQ(ierr); 272 ierr = MatCreateTranspose(B_BI,&Bt_BI);CHKERRQ(ierr); 273 ierr = MatCreateTranspose(B_BB,&Bt_BB);CHKERRQ(ierr); 274 } else { 275 ierr = MatGetSubMatrix(lA,pcis->is_I_local,lP,MAT_INITIAL_MATRIX,&Bt_BI);CHKERRQ(ierr); 276 ierr = MatGetSubMatrix(lA,pcis->is_B_local,lP,MAT_INITIAL_MATRIX,&Bt_BB);CHKERRQ(ierr); 277 ierr = MatCreateTranspose(Bt_BI,&B_BI);CHKERRQ(ierr); 278 ierr = MatCreateTranspose(Bt_BB,&B_BB);CHKERRQ(ierr); 279 } 280 ierr = PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_B_BI",(PetscObject)B_BI);CHKERRQ(ierr); 281 ierr = PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_B_BB",(PetscObject)B_BB);CHKERRQ(ierr); 282 ierr = PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_Bt_BI",(PetscObject)Bt_BI);CHKERRQ(ierr); 283 ierr = PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_Bt_BB",(PetscObject)Bt_BB);CHKERRQ(ierr); 284 ierr = MatDestroy(&B_BI);CHKERRQ(ierr); 285 ierr = MatDestroy(&B_BB);CHKERRQ(ierr); 286 ierr = MatDestroy(&Bt_BI);CHKERRQ(ierr); 287 ierr = MatDestroy(&Bt_BB);CHKERRQ(ierr); 288 } 289 290 /* Creating scaling vector D */ 291 ierr = PetscOptionsGetBool(((PetscObject)pc)->options,((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);CHKERRQ(ierr); 292 if (pcis->use_stiffness_scaling) { 293 ierr = MatGetDiagonal(pcis->A_BB,pcis->D);CHKERRQ(ierr); 294 } 295 ierr = MatCreateVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */ 296 ierr = VecSet(counter,0.0);CHKERRQ(ierr); 297 ierr = VecScatterBegin(pcis->global_to_B,pcis->D,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 298 ierr = VecScatterEnd(pcis->global_to_B,pcis->D,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 299 ierr = VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 300 ierr = 
VecScatterEnd(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 301 ierr = VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);CHKERRQ(ierr); 302 ierr = VecDestroy(&counter);CHKERRQ(ierr); 303 304 /* See historical note 01, at the bottom of this file. */ 305 306 /* Creating the KSP contexts for the local Dirichlet and Neumann problems */ 307 if (computesolvers) { 308 PC pc_ctx; 309 310 pcis->pure_neumann = matis->pure_neumann; 311 /* Dirichlet */ 312 ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr); 313 ierr = KSPSetErrorIfNotConverged(pcis->ksp_D,pc->erroriffailure);CHKERRQ(ierr); 314 ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr); 315 ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II);CHKERRQ(ierr); 316 ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr); 317 ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr); 318 ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr); 319 ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr); 320 ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr); 321 /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. 
Values are not used */ 322 ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr); 323 /* Neumann */ 324 ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr); 325 ierr = KSPSetErrorIfNotConverged(pcis->ksp_N,pc->erroriffailure);CHKERRQ(ierr); 326 ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr); 327 ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A);CHKERRQ(ierr); 328 ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr); 329 ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr); 330 ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr); 331 ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr); 332 ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr); 333 { 334 PetscBool damp_fixed = PETSC_FALSE, 335 remove_nullspace_fixed = PETSC_FALSE, 336 set_damping_factor_floating = PETSC_FALSE, 337 not_damp_floating = PETSC_FALSE, 338 not_remove_nullspace_floating = PETSC_FALSE; 339 PetscReal fixed_factor, 340 floating_factor; 341 342 ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr); 343 if (!damp_fixed) fixed_factor = 0.0; 344 ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);CHKERRQ(ierr); 345 346 ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);CHKERRQ(ierr); 347 348 ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating", 349 &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr); 350 if (!set_damping_factor_floating) floating_factor = 0.0; 351 ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);CHKERRQ(ierr); 352 if (!set_damping_factor_floating) floating_factor = 1.e-12; 
353 354 ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",¬_damp_floating,NULL);CHKERRQ(ierr); 355 356 ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",¬_remove_nullspace_floating,NULL);CHKERRQ(ierr); 357 358 if (pcis->pure_neumann) { /* floating subdomain */ 359 if (!(not_damp_floating)) { 360 ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr); 361 ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr); 362 } 363 if (!(not_remove_nullspace_floating)) { 364 MatNullSpace nullsp; 365 ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);CHKERRQ(ierr); 366 ierr = MatSetNullSpace(matis->A,nullsp);CHKERRQ(ierr); 367 ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr); 368 } 369 } else { /* fixed subdomain */ 370 if (damp_fixed) { 371 ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr); 372 ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr); 373 } 374 if (remove_nullspace_fixed) { 375 MatNullSpace nullsp; 376 ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);CHKERRQ(ierr); 377 ierr = MatSetNullSpace(matis->A,nullsp);CHKERRQ(ierr); 378 ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr); 379 } 380 } 381 } 382 /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. 
Values are not used */ 383 ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr); 384 } 385 386 PetscFunctionReturn(0); 387 } 388 389 /* -------------------------------------------------------------------------- */ 390 /* 391 PCISDestroy - 392 */ 393 #undef __FUNCT__ 394 #define __FUNCT__ "PCISDestroy" 395 PetscErrorCode PCISDestroy(PC pc) 396 { 397 PC_IS *pcis = (PC_IS*)(pc->data); 398 PetscErrorCode ierr; 399 400 PetscFunctionBegin; 401 ierr = ISDestroy(&pcis->is_B_local);CHKERRQ(ierr); 402 ierr = ISDestroy(&pcis->is_I_local);CHKERRQ(ierr); 403 ierr = ISDestroy(&pcis->is_B_global);CHKERRQ(ierr); 404 ierr = ISDestroy(&pcis->is_I_global);CHKERRQ(ierr); 405 ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr); 406 ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr); 407 ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr); 408 ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr); 409 ierr = VecDestroy(&pcis->D);CHKERRQ(ierr); 410 ierr = KSPDestroy(&pcis->ksp_N);CHKERRQ(ierr); 411 ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr); 412 ierr = VecDestroy(&pcis->vec1_N);CHKERRQ(ierr); 413 ierr = VecDestroy(&pcis->vec2_N);CHKERRQ(ierr); 414 ierr = VecDestroy(&pcis->vec1_D);CHKERRQ(ierr); 415 ierr = VecDestroy(&pcis->vec2_D);CHKERRQ(ierr); 416 ierr = VecDestroy(&pcis->vec3_D);CHKERRQ(ierr); 417 ierr = VecDestroy(&pcis->vec4_D);CHKERRQ(ierr); 418 ierr = VecDestroy(&pcis->vec1_B);CHKERRQ(ierr); 419 ierr = VecDestroy(&pcis->vec2_B);CHKERRQ(ierr); 420 ierr = VecDestroy(&pcis->vec3_B);CHKERRQ(ierr); 421 ierr = VecDestroy(&pcis->vec1_global);CHKERRQ(ierr); 422 ierr = VecScatterDestroy(&pcis->global_to_D);CHKERRQ(ierr); 423 ierr = VecScatterDestroy(&pcis->N_to_B);CHKERRQ(ierr); 424 ierr = VecScatterDestroy(&pcis->N_to_D);CHKERRQ(ierr); 425 ierr = VecScatterDestroy(&pcis->global_to_B);CHKERRQ(ierr); 426 ierr = PetscFree(pcis->work_N);CHKERRQ(ierr); 427 if (pcis->n_neigh > -1) { 428 ierr = ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr); 429 } 
430 ierr = ISLocalToGlobalMappingDestroy(&pcis->mapping);CHKERRQ(ierr); 431 ierr = ISLocalToGlobalMappingDestroy(&pcis->BtoNmap);CHKERRQ(ierr); 432 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL);CHKERRQ(ierr); 433 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL);CHKERRQ(ierr); 434 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL);CHKERRQ(ierr); 435 PetscFunctionReturn(0); 436 } 437 438 /* -------------------------------------------------------------------------- */ 439 /* 440 PCISCreate - 441 */ 442 #undef __FUNCT__ 443 #define __FUNCT__ "PCISCreate" 444 PetscErrorCode PCISCreate(PC pc) 445 { 446 PC_IS *pcis = (PC_IS*)(pc->data); 447 PetscErrorCode ierr; 448 449 PetscFunctionBegin; 450 pcis->is_B_local = 0; 451 pcis->is_I_local = 0; 452 pcis->is_B_global = 0; 453 pcis->is_I_global = 0; 454 pcis->A_II = 0; 455 pcis->A_IB = 0; 456 pcis->A_BI = 0; 457 pcis->A_BB = 0; 458 pcis->D = 0; 459 pcis->ksp_N = 0; 460 pcis->ksp_D = 0; 461 pcis->vec1_N = 0; 462 pcis->vec2_N = 0; 463 pcis->vec1_D = 0; 464 pcis->vec2_D = 0; 465 pcis->vec3_D = 0; 466 pcis->vec1_B = 0; 467 pcis->vec2_B = 0; 468 pcis->vec3_B = 0; 469 pcis->vec1_global = 0; 470 pcis->work_N = 0; 471 pcis->global_to_D = 0; 472 pcis->N_to_B = 0; 473 pcis->N_to_D = 0; 474 pcis->global_to_B = 0; 475 pcis->mapping = 0; 476 pcis->BtoNmap = 0; 477 pcis->n_neigh = -1; 478 pcis->scaling_factor = 1.0; 479 pcis->reusesubmatrices = PETSC_TRUE; 480 /* composing functions */ 481 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS);CHKERRQ(ierr); 482 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS);CHKERRQ(ierr); 483 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS);CHKERRQ(ierr); 484 PetscFunctionReturn(0); 485 } 
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur - Applies the local Schur complement S = A_BB - A_BI * A_II^{-1} * A_IB
   to a vector of interface values, using the Dirichlet solver ksp_D for A_II^{-1}.

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of Schur complement applied to chunk
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplySchur"
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /* caller may pass vec2_B == NULL to reuse v as workspace (v is then overwritten) */
  if (!vec2_B) vec2_B = v;

  ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);           /* vec1_B = A_BB * v                    */
  ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);           /* vec1_D = A_IB * v                    */
  ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);    /* vec2_D = A_II^{-1} * A_IB * v        */
  ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);      /* vec2_B = A_BI * A_II^{-1} * A_IB * v */
  ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);            /* vec1_B = S * v                       */
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
   including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
   mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array

   Output parameter:
.  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array

   Notes:
   The entries in the array that do not correspond to interface nodes remain unaltered.
   The scatter is performed directly with the local indices of is_B_local; no
   communication is involved.
*/
#undef __FUNCT__
#define __FUNCT__ "PCISScatterArrayNToVecB"
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt       i;
  const PetscInt *idex;   /* local indices of the interface nodes within the full local numbering */
  PetscErrorCode ierr;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
  ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);

  if (smode == SCATTER_FORWARD) {
    /* array -> vector */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else {  /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else {  /* SCATTER_REVERSE: vector -> array */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else {  /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
  ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplyInvSchur"
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero interior entries, b on the interface */
  ierr = VecSet(vec1_N,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* Checking for consistency of the RHS (diagnostic only, enabled by -pc_is_check_consistency):
     for a floating (pure Neumann) subdomain the RHS must be orthogonal to the constant nullspace,
     so its average should be (numerically) zero */
  {
    PetscBool flg = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,NULL,"-pc_is_check_consistency",&flg,NULL);CHKERRQ(ierr);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);

      ierr    = VecSum(vec1_N,&average);CHKERRQ(ierr);
      average = average / ((PetscReal)pcis->n);
      ierr    = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      if (pcis->pure_neumann) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  }
  /* Solving the system for vec2_N */
  ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
  /* Extracting the local interface vector out of the solution */
  ierr = VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}