/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscksp.h>                 /*I "petscksp.h" I*/
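/*
   Typical usage from the options database (a sketch; see the PCREDUNDANT manual page below):

     -pc_type redundant -pc_redundant_number 4            (split the processes into 4 redundant subgroups)
     -redundant_ksp_type preonly -redundant_pc_type lu    (control the inner solver via the redundant_ prefix)
*/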
typedef struct {
  KSP          ksp;
  PC           pc;                    /* actual preconditioner used on each processor */
  Vec          xsub,ysub;             /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;             /* parallel vectors that congregate xsub or ysub, facilitating vector scattering */
  Mat          pmats;                 /* matrix and optional preconditioner matrix that belong to a subcommunicator */
  VecScatter   scatterin,scatterout;  /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;              /* number of subcommunicators, i.e. number of redundant solves */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm  = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize) {
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,1,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the duplicated parent communicator dupparent, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin) {
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++) {
        for (i=mstart; i<mend; i++) {
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(&is1);CHKERRQ(ierr);
      ierr = ISDestroy(&is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&vec);CHKERRQ(ierr);

  /* if the preconditioner matrix provided by the user is sequential, we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into the processes of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell the PC of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
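/*
   Data flow set up above and exercised in PCApply_Redundant() below:

     x --(scatterin)--> xdup --(VecPlaceArray)--> xsub --(KSPSolve on red->ksp)--> ysub
       --(VecPlaceArray)--> ydup --(scatterout)--> y

   xdup and ydup live on the duplicated parent communicator, xsub and ysub on each
   subcommunicator; the VecPlaceArray() steps share local arrays, so no values are
   copied between the dup and sub vectors.
*/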
#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
  ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
  ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  if (red->ksp) {ierr = KSPReset(red->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant preconditioners","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                use an nredundant of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Number of redundant preconditioners %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
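/*
   Example usage (a sketch; assumes a KSP named ksp already exists and omits error checking with CHKERRQ):

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);   (four redundant subgroups, i.e. four copies of the solve)

   This is equivalent to the options database setting -pc_redundant_number 4.
*/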
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin = in;
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs the solve on a smaller set of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
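/*
   Example usage (a sketch; assumes pc is a PC of type PCREDUNDANT and omits error checking):

     KSP inner;
     PC  innerpc;
     PCRedundantGetKSP(pc,&inner);
     KSPGetPC(inner,&innerpc);
     PCSetType(innerpc,PCCHOLESKY);   (replace the default LU used on each subcommunicator)

   The inner objects can also be configured from the options database via the
   redundant_ prefix, e.g. -redundant_pc_type cholesky.
*/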
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used by the redundant solve on each subcommunicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver, with its preconditioner, for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx

   Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              use an n of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
                    PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetKSP_C","PCRedundantGetKSP_Redundant",
                    PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END