#define PETSCKSP_DLL

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vectors that gather xsub or ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators, i.e. the number of redundant solves */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer,subviewer;
  PetscInt       color = red->psubcomm->color;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer," Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    if (!color) { /* only view the first redundant pc */
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
  } else if (isstring) { /* not tested yet! */
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = KSPView(red->ksp,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#include "private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes in each subcomm have a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get the local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    rstart_sub = pc->pmat->rmap->range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap->range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc(2*red->psubcomm->n*mlocal*sizeof(PetscInt),&idx1);CHKERRQ(ierr);
      idx2 = idx1 + red->psubcomm->n*mlocal;
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree(idx1);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if the pmatrix set by the user is sequential (a single process), we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the PC of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
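
/*
   Illustrative sketch (not part of the original source): how the scatter indices
   built in PCSetUp_Redundant() lay out the duplicated vector. Suppose m = 6
   global rows, nsubcomm = 2, and this process owns rows [mstart,mend) = [0,3).
   Then

     idx1 = {0,1,2, 0,1,2}   (source rows in x, repeated once per subcomm)
     idx2 = {0,1,2, 6,7,8}   (destination rows in xdup, offset by m per copy)

   so scatterin copies each global row i of x into rows i and i+m of xdup, and
   every subcommunicator receives a full copy of x. scatterout reverses this,
   reading back only the copy owned by this process's subcommunicator, at
   offset color*m.
*/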

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply the preconditioner on each processor group (subcommunicator) */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
  if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
  if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
  if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
  if (red->pmats) {
    ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
  }
  if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
  if (red->ksp)      {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
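
/*
   Illustrative sketch (not part of the original source): the inner KSP/PC inherit
   the outer options prefix with "redundant_" appended (see PCSetUp_Redundant()),
   so a typical command line might look like the following (program name is
   hypothetical):

     mpiexec -n 16 ./app -pc_type redundant -pc_redundant_number 4 \
                         -redundant_ksp_type preonly -redundant_pc_type lu

   which runs 4 redundant direct solves, each on 16/4 = 4 processes.
*/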

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG,"Number of redundant pc %D must be positive",nredundant);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  if (red->scatterin) {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  red->scatterin = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
   redundant local solve and the scatter used to move them back into the global
   vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
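
/*
   Illustrative sketch (not part of the original source): setting the number of
   redundant solves programmatically instead of via -pc_redundant_number. The
   KSP "ksp" is assumed to exist already; error checking follows the usual
   PETSc conventions.

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);  /* 4 subcommunicators */
*/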

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcomm have a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetPC"
/*@
   PCRedundantGetPC - Gets the PC created by the redundant PC; it is used on each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the inner PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode ierr,(*f)(PC,PC*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidPointer(innerpc,2);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used by the redundant PC on each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
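
/*
   Illustrative sketch (not part of the original source): customizing the inner
   (per-subcommunicator) preconditioner from code rather than from the options
   database, e.g. switching the default LU factorization to Cholesky. The PC
   "pc" is assumed to already have type PCREDUNDANT.

     PC innerpc;
     ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);
*/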

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx

   Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
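
/*
   Illustrative end-to-end sketch (not part of the original source): solving a
   linear system with PCREDUNDANT from user code. The Mat A and Vecs b,x and
   their assembly are assumed to exist; error checking follows the usual PETSc
   conventions of this release.

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,2);CHKERRQ(ierr); /* two redundant subgroups */
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     ierr = KSPDestroy(ksp);CHKERRQ(ierr);
*/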