/*
   This file defines a preconditioner that load balances the rows of the matrix across the MPI processes,
   removes the rows (and corresponding columns) that have only a diagonal entry, and applies a KSP to the
   resulting smaller system.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscksp.h>

typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  char             *splitname;
  IS                is;
  PC_FieldSplitLink next, previous;
};

typedef struct {
  KSP          ksp;
  Vec          x, b;
  VecScatter   scatter;
  IS           is;
  PetscInt     dcnt, *drows; /* these are the local rows that have only diagonal entry */
  PetscScalar *diag;
  Vec          work;
  PetscBool    zerodiag;

  PetscInt          nsplits;
  PC_FieldSplitLink splitlinks;
} PC_Redistribute;

static PetscErrorCode PCFieldSplitSetIS_Redistribute(PC pc, const char splitname[], IS is)
{
  PC_Redistribute   *red  = (PC_Redistribute *)pc->data;
  PC_FieldSplitLink *next = &red->splitlinks;

  PetscFunctionBegin;
  while (*next) next = &(*next)->next;
  PetscCall(PetscNew(next));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &(*next)->splitname));
  } else {
    PetscCall(PetscMalloc1(8, &(*next)->splitname));
    PetscCall(PetscSNPrintf((*next)->splitname, 7, "%" PetscInt_FMT, red->nsplits++));
  }
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&(*next)->is));
  (*next)->is = is;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Redistribute(PC pc, PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;
  PetscBool        iascii, isstring;
  PetscInt         ncnt, N;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    PetscCall(MPIU_Allreduce(&red->dcnt, &ncnt, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    PetscCall(MatGetSize(pc->pmat, &N, NULL));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number rows eliminated %" PetscInt_FMT " Percentage rows eliminated %g\n", ncnt, (double)(100.0 * ((PetscReal)ncnt) / ((PetscReal)N))));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Redistribute preconditioner: \n"));
    PetscCall(KSPView(red->ksp, viewer));
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redistribute preconditioner"));
    PetscCall(KSPView(red->ksp, viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

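/*
   PCSetUp_Redistribute() below proceeds in three stages: (1) each locally owned row is classified as
   "diagonal only" (kept local and inverted directly in PCApply_Redistribute()) or "non-diagonal";
   (2) the non-diagonal rows are assigned to processes through a load-balanced PetscLayout and, when
   running on more than one MPI process, their global indices are exchanged so that red->is lists the
   rows each process owns after redistribution; (3) the scatter between the two layouts is created and
   the corresponding submatrix is extracted and handed to the inner KSP.
*/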
static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;
  MPI_Comm         comm;
  PetscInt         rstart, rend, nrstart, nrend, i, nz, cnt, *rows, ncnt, dcnt, *drows;
  PetscLayout      map, nmap;
  PetscMPIInt      size, tag, n;
  PETSC_UNUSED PetscMPIInt imdex;
  PetscInt          *source = NULL;
  PetscMPIInt       *sizes = NULL, nrecvs;
  PetscInt           j, nsends;
  PetscInt          *owner = NULL, *starts = NULL, count, slen;
  PetscInt          *rvalues, *svalues, recvtotal;
  PetscMPIInt       *onodes1, *olengths1;
  MPI_Request       *send_waits = NULL, *recv_waits = NULL;
  MPI_Status         recv_status, *send_status;
  Vec                tvec, diag;
  Mat                tmat;
  const PetscScalar *d, *values;
  const PetscInt    *cols;
  PC_FieldSplitLink *next = &red->splitlinks;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    PetscCheck(pc->flag == SAME_NONZERO_PATTERN, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PC is not supported for a change in the nonzero structure of the matrix");
    PetscCall(KSPGetOperators(red->ksp, NULL, &tmat));
    PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_REUSE_MATRIX, &tmat));
    PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
  } else {
    PetscInt          NN;
    PC                ipc;
    PetscVoidFunction fptr;

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCall(PetscObjectGetNewTag((PetscObject)pc, &tag));

    /* count non-diagonal rows on process */
    PetscCall(MatGetOwnershipRange(pc->mat, &rstart, &rend));
    cnt = 0;
    for (i = rstart; i < rend; i++) {
      PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
      for (PetscInt j = 0; j < nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          cnt++;
          break;
        }
      }
      PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
    }
    PetscCall(PetscMalloc1(cnt, &rows));
    PetscCall(PetscMalloc1(rend - rstart - cnt, &drows));

    /* list non-diagonal rows on process */
    cnt  = 0;
    dcnt = 0;
    for (i = rstart; i < rend; i++) {
      PetscBool diagonly = PETSC_TRUE;
      PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
      for (PetscInt j = 0; j < nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          diagonly = PETSC_FALSE;
          break;
        }
      }
      if (!diagonly) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
    }

    /* create PetscLayout for non-diagonal rows on each process */
    PetscCall(PetscLayoutCreate(comm, &map));
    PetscCall(PetscLayoutSetLocalSize(map, cnt));
    PetscCall(PetscLayoutSetBlockSize(map, 1));
    PetscCall(PetscLayoutSetUp(map));
    nrstart = map->rstart;
    nrend   = map->rend;

    /* create PetscLayout for load-balanced non-diagonal rows on each process */
    PetscCall(PetscLayoutCreate(comm, &nmap));
    PetscCall(MPIU_Allreduce(&cnt, &ncnt, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscLayoutSetSize(nmap, ncnt));
    PetscCall(PetscLayoutSetBlockSize(nmap, 1));
    PetscCall(PetscLayoutSetUp(nmap));

    PetscCall(MatGetSize(pc->pmat, &NN, NULL));
    PetscCall(PetscInfo(pc, "Number of diagonal rows eliminated %" PetscInt_FMT ", percentage eliminated %g\n", NN - ncnt, (double)(((PetscReal)(NN - ncnt)) / ((PetscReal)(NN)))));

    if (size > 1) {
      /*
         the following block of code assumes MPI can send messages to self, which is not supported for MPI-uni hence we need to handle
         the size 1 case as a special case

         this code is taken from VecScatterCreate_PtoS()
         Determines what rows need to be moved where to
         load balance the non-diagonal rows
      */
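      /*
         Each locally owned non-diagonal row, numbered nrstart..nrend-1 in the layout "map", is assigned to
         the process that owns that index in the load-balanced layout "nmap". The global row numbers are then
         sent to their new owners; the indices a process receives become red->is, the rows of the original
         matrix it owns in the redistributed system.
      */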
      /* count number of contributors to each processor */
      PetscCall(PetscMalloc2(size, &sizes, cnt, &owner));
      PetscCall(PetscArrayzero(sizes, size));
      j      = 0;
      nsends = 0;
      for (i = nrstart; i < nrend; i++) {
        if (i < nmap->range[j]) j = 0;
        for (; j < size; j++) {
          if (i < nmap->range[j + 1]) {
            if (!sizes[j]++) nsends++;
            owner[i - nrstart] = j;
            break;
          }
        }
      }
      /* inform other processors of number of messages and max length */
      PetscCall(PetscGatherNumberOfMessages(comm, NULL, sizes, &nrecvs));
      PetscCall(PetscGatherMessageLengths(comm, nsends, nrecvs, sizes, &onodes1, &olengths1));
      PetscCall(PetscSortMPIIntWithArray(nrecvs, onodes1, olengths1));
      recvtotal = 0;
      for (i = 0; i < nrecvs; i++) recvtotal += olengths1[i];

      /* post receives: rvalues - rows I will own; count - running offset into rvalues */
      PetscCall(PetscMalloc3(recvtotal, &rvalues, nrecvs, &source, nrecvs, &recv_waits));
      count = 0;
      for (i = 0; i < nrecvs; i++) {
        PetscCallMPI(MPI_Irecv((rvalues + count), olengths1[i], MPIU_INT, onodes1[i], tag, comm, recv_waits + i));
        count += olengths1[i];
      }

      /* do sends:
         1) starts[i] gives the starting index in svalues for stuff going to
         the ith processor
      */
      PetscCall(PetscMalloc3(cnt, &svalues, nsends, &send_waits, size, &starts));
      starts[0] = 0;
      for (i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
      for (i = 0; i < cnt; i++) svalues[starts[owner[i]]++] = rows[i];
      for (i = 0; i < cnt; i++) rows[i] = rows[i] - nrstart;
      red->drows = drows;
      red->dcnt  = dcnt;
      PetscCall(PetscFree(rows));

      starts[0] = 0;
      for (i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
      count = 0;
      for (i = 0; i < size; i++) {
        if (sizes[i]) PetscCallMPI(MPI_Isend(svalues + starts[i], sizes[i], MPIU_INT, i, tag, comm, send_waits + count++));
      }

      /* wait on receives */
      count = nrecvs;
      slen  = 0;
      while (count) {
        PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
        /* unpack receives into our local space */
        PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &n));
        slen += n;
        count--;
      }
      PetscCheck(slen == recvtotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Total message lengths %" PetscInt_FMT " not expected %" PetscInt_FMT, slen, recvtotal);
      PetscCall(ISCreateGeneral(comm, slen, rvalues, PETSC_COPY_VALUES, &red->is));

      /* free all work space */
      PetscCall(PetscFree(olengths1));
      PetscCall(PetscFree(onodes1));
      PetscCall(PetscFree3(rvalues, source, recv_waits));
      PetscCall(PetscFree2(sizes, owner));
      if (nsends) { /* wait on sends */
        PetscCall(PetscMalloc1(nsends, &send_status));
        PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
        PetscCall(PetscFree(send_status));
      }
      PetscCall(PetscFree3(svalues, send_waits, starts));
    } else {
      PetscCall(ISCreateGeneral(comm, cnt, rows, PETSC_OWN_POINTER, &red->is));
      red->drows = drows;
      red->dcnt  = dcnt;
      slen       = cnt;
    }
    PetscCall(PetscLayoutDestroy(&map));

    PetscCall(VecCreateMPI(comm, slen, PETSC_DETERMINE, &red->b));
    PetscCall(VecDuplicate(red->b, &red->x));
    PetscCall(MatCreateVecs(pc->pmat, &tvec, NULL));
    PetscCall(VecScatterCreate(tvec, red->is, red->b, NULL, &red->scatter));

    /* Map the PCFIELDSPLIT fields to redistributed KSP */
    PetscCall(KSPGetPC(red->ksp, &ipc));
    PetscCall(PetscObjectQueryFunction((PetscObject)ipc, "PCFieldSplitSetIS_C", &fptr));
    if (fptr && *next) {
      PetscScalar       *atvec;
      const PetscScalar *ab;
      PetscInt           primes[] = {2, 3, 5, 7, 11, 13, 17, 19};
      PetscInt           cnt = 0;

      PetscCheck(red->nsplits <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(primes), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No support for this many fields");
      PetscCall(VecSet(tvec, 1.0));
      PetscCall(VecGetArray(tvec, &atvec));

      while (*next) {
        const PetscInt *indices;
        PetscInt        n;

        PetscCall(ISGetIndices((*next)->is, &indices));
        PetscCall(ISGetLocalSize((*next)->is, &n));
        for (PetscInt i = 0; i < n; i++) atvec[indices[i] - rstart] *= primes[cnt];
        PetscCall(ISRestoreIndices((*next)->is, &indices));
        cnt++;
        next = &(*next)->next;
      }
      PetscCall(VecRestoreArray(tvec, &atvec));
      PetscCall(VecScatterBegin(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
      cnt = 0;
      PetscCall(VecGetArrayRead(red->b, &ab));
      next = &red->splitlinks;
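      /*
         After the scatter, entry i of red->b holds the product of the primes of every split that contained
         the corresponding original row (rows belonging to no split remain 1.0). Each split's indices in the
         redistributed numbering are therefore recovered by testing divisibility by that split's prime and are
         passed on to the inner PCFIELDSPLIT.
      */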
      while (*next) {
        PetscInt  n = 0;
        PetscInt *indices;
        IS        ris;

        for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
          if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) n++;
        }
        PetscCall(PetscMalloc1(n, &indices));
        n = 0;
        for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
          if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) indices[n++] = i + nmap->rstart;
        }
        PetscCall(ISCreateGeneral(comm, n, indices, PETSC_OWN_POINTER, &ris));
        PetscCall(PCFieldSplitSetIS(ipc, (*next)->splitname, ris));

        PetscCall(ISDestroy(&ris));
        cnt++;
        next = &(*next)->next;
      }
      PetscCall(VecRestoreArrayRead(red->b, &ab));
    }
    PetscCall(VecDestroy(&tvec));
    PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_INITIAL_MATRIX, &tmat));
    PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
    PetscCall(MatDestroy(&tmat));
    PetscCall(PetscLayoutDestroy(&nmap));
  }

  /* get diagonal portion of matrix */
  PetscCall(PetscFree(red->diag));
  PetscCall(PetscMalloc1(red->dcnt, &red->diag));
  PetscCall(MatCreateVecs(pc->pmat, &diag, NULL));
  PetscCall(MatGetDiagonal(pc->pmat, diag));
  PetscCall(VecGetArrayRead(diag, &d));
  for (i = 0; i < red->dcnt; i++) {
    if (d[red->drows[i]] != 0) red->diag[i] = 1.0 / d[red->drows[i]];
    else {
      red->zerodiag = PETSC_TRUE;
      red->diag[i]  = 0.0;
    }
  }
  PetscCall(VecRestoreArrayRead(diag, &d));
  PetscCall(VecDestroy(&diag));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_Redistribute(PC pc, Vec b, Vec x)
{
  PC_Redistribute   *red  = (PC_Redistribute *)pc->data;
  PetscInt           dcnt = red->dcnt, i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork, *diag = red->diag;
  PetscBool          nonzero_guess;

  PetscFunctionBegin;
  if (!red->work) PetscCall(VecDuplicate(b, &red->work));
  PetscCall(KSPGetInitialGuessNonzero(red->ksp, &nonzero_guess));
  if (nonzero_guess) {
    PetscCall(VecScatterBegin(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
  }

  /* compute the rows of solution that have diagonal entries only */
  PetscCall(VecSet(x, 0.0)); /* x = diag(A)^{-1} b */
  PetscCall(VecGetArray(x, &xwork));
  PetscCall(VecGetArrayRead(b, &bwork));
  if (red->zerodiag) {
    for (i = 0; i < dcnt; i++) {
      if (diag[i] == 0.0 && bwork[drows[i]] != 0.0) {
        PetscCheck(!pc->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_CONV_FAILED, "Linear system is inconsistent, zero matrix row but nonzero right hand side");
        PetscCall(PetscInfo(pc, "Linear system is inconsistent, zero matrix row but nonzero right hand side\n"));
        PetscCall(VecSetInf(x));
        pc->failedreasonrank = PC_INCONSISTENT_RHS;
      }
    }
  }
  for (i = 0; i < dcnt; i++) xwork[drows[i]] = diag[i] * bwork[drows[i]];
  PetscCall(PetscLogFlops(dcnt));
  PetscCall(VecRestoreArray(x, &xwork));
  PetscCall(VecRestoreArrayRead(b, &bwork));
  /* update the right hand side for the reduced system with diagonal rows (and corresponding columns) removed */
  PetscCall(MatMult(pc->pmat, x, red->work));
  PetscCall(VecAYPX(red->work, -1.0, b)); /* red->work = b - A x */

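  /* scatter the reduced right-hand side into the load-balanced layout, solve the redistributed system
     with the inner KSP, and scatter the solution back into the caller's ordering */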
  PetscCall(VecScatterBegin(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(KSPSolve(red->ksp, red->b, red->x));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->x));
  PetscCall(VecScatterBegin(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute  *red  = (PC_Redistribute *)pc->data;
  PC_FieldSplitLink next = red->splitlinks;

  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));

  while (next) {
    PC_FieldSplitLink ilink;
    PetscCall(PetscFree(next->splitname));
    PetscCall(ISDestroy(&next->is));
    ilink = next;
    next  = next->next;
    PetscCall(PetscFree(ilink));
  }
  PetscCall(VecScatterDestroy(&red->scatter));
  PetscCall(ISDestroy(&red->is));
  PetscCall(VecDestroy(&red->b));
  PetscCall(VecDestroy(&red->x));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(VecDestroy(&red->work));
  PetscCall(PetscFree(red->drows));
  PetscCall(PetscFree(red->diag));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Redistribute(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;

  PetscFunctionBegin;
  PetscCall(KSPSetFromOptions(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedistributeGetKSP - Gets the `KSP` created by the `PCREDISTRIBUTE` preconditioner

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. innerksp - the inner `KSP`

  Level: advanced

.seealso: `KSP`, `PCREDISTRIBUTE`
@*/
PetscErrorCode PCRedistributeGetKSP(PC pc, KSP *innerksp)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(innerksp, 2);
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows (and the corresponding columns) that only have a diagonal entry and then
   applies a `KSP` to that new smaller matrix

   Level: intermediate

   Notes:
   Options for the inner `KSP` and `PC` are set with the options database prefix `-redistribute_`

   Usually run this with `-ksp_type preonly`

   If you have used `MatZeroRows()` to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, `-ksp_type preonly
   -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc` to take advantage of the symmetry.

   Supports the function `PCFieldSplitSetIS()`; it passes the appropriate reduced field index sets to an inner `PCFIELDSPLIT`, selected with, for example,
   `-ksp_type preonly -pc_type redistribute -redistribute_pc_type fieldsplit`. Does not support the `PCFIELDSPLIT` options database keys.

   This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and redistributed matrix is the same. Rows are moved
   between MPI processes inside the preconditioner to balance the number of rows on each process.

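   Example Usage:
   A minimal sketch of a solve that uses `PCREDISTRIBUTE` (it assumes a `Mat` A and `Vec` b and x have already been created) is
.vb
   KSP ksp;
   PC  pc;

   KSPCreate(PETSC_COMM_WORLD, &ksp);
   KSPSetOperators(ksp, A, A);
   KSPSetType(ksp, KSPPREONLY);
   KSPGetPC(ksp, &pc);
   PCSetType(pc, PCREDISTRIBUTE);
   KSPSetFromOptions(ksp);
   KSPSolve(ksp, b, x);
.ve
   The inner solver is then selected from the options database, for example with -redistribute_ksp_type cg -redistribute_pc_type bjacobi.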

   Developer Note:
   Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedistributeGetKSP()`, `MatZeroRows()`, `PCFieldSplitSetIS()`, `PCFIELDSPLIT`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
{
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  pc->data = (void *)red;

  pc->ops->apply          = PCApply_Redistribute;
  pc->ops->applytranspose = NULL;
  pc->ops->setup          = PCSetUp_Redistribute;
  pc->ops->destroy        = PCDestroy_Redistribute;
  pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
  pc->ops->view           = PCView_Redistribute;

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &red->ksp));
  PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
  PetscCall(PCGetOptionsPrefix(pc, &prefix));
  PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(red->ksp, "redistribute_"));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_Redistribute));
  PetscFunctionReturn(PETSC_SUCCESS);
}