#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators (redundant solves) */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer,subviewer;
  PetscInt       color = red->psubcomm->color;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer," Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    if (!color) { /* only view first redundant pc */
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
  } else if (isstring) { /* not tested yet */
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = KSPView(red->ksp,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#include "private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new PC of which the processes in each subcomm have a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    rstart_sub = pc->pmat->rmap->range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap->range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the pmatrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the PC of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);
  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->ksp)      {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                use an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG,"Number of redundant pc %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
  red->scatterin = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC of which the processes in each subcomm have a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the sequential PC created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the sequential PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx

   Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              use an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
          PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
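
/*
   A minimal usage sketch (illustrative only, not part of the library source): how an application
   might select PCREDUNDANT and set the number of subgroups from code. It assumes a parallel matrix
   A and vectors b and x already exist; the variable names and the choice of 4 subgroups are
   assumptions for the example. The same effect is available at run time with
   -pc_type redundant -pc_redundant_number 4.

     KSP ksp;
     PC  pc;

     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     ierr = KSPDestroy(ksp);CHKERRQ(ierr);
*/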