xref: /petsc/src/ksp/pc/impls/mpi/pcmpi.c (revision dad3da8e34286fb6ce90bb426d531ecdf48a854b)
1f1f2ae84SBarry Smith /*
2f1f2ae84SBarry Smith     This file creates an MPI parallel KSP from a sequential PC that lives on MPI rank 0.
3f1f2ae84SBarry Smith     It is intended to allow using PETSc MPI parallel linear solvers from non-MPI codes.
4f1f2ae84SBarry Smith 
5f1f2ae84SBarry Smith     That program may use OpenMP to compute the right hand side and matrix for the linear system
6f1f2ae84SBarry Smith 
7f1f2ae84SBarry Smith     The code uses MPI_COMM_WORLD below but maybe it should be PETSC_COMM_WORLD
8f1f2ae84SBarry Smith 
9f1f2ae84SBarry Smith     The resulting KSP and PC can only be controlled via the options database, though some common commands
10f1f2ae84SBarry Smith     could be passed through the server.
11f1f2ae84SBarry Smith 
12f1f2ae84SBarry Smith */
13f1f2ae84SBarry Smith #include <petsc/private/pcimpl.h>
14f1f2ae84SBarry Smith #include <petsc/private/kspimpl.h>
15f1f2ae84SBarry Smith 
16f1f2ae84SBarry Smith #define PC_MPI_MAX_RANKS  256
17f1f2ae84SBarry Smith #define PC_MPI_COMM_WORLD MPI_COMM_WORLD
18f1f2ae84SBarry Smith 
19f1f2ae84SBarry Smith typedef struct {
20f1f2ae84SBarry Smith   KSP         ksps[PC_MPI_MAX_RANKS];                               /* The addresses of the MPI parallel KSP on each rank, NULL when not on a rank. */
21f1f2ae84SBarry Smith   PetscMPIInt sendcount[PC_MPI_MAX_RANKS], displ[PC_MPI_MAX_RANKS]; /* For scatter/gather of rhs/solution */
22f1f2ae84SBarry Smith   PetscMPIInt NZ[PC_MPI_MAX_RANKS], NZdispl[PC_MPI_MAX_RANKS];      /* For scatter of nonzero values in matrix (and nonzero column indices initially */
23f1f2ae84SBarry Smith   PetscInt    mincntperrank;                                        /* minimum number of desired nonzeros per active rank in MPI parallel KSP solve */
24f1f2ae84SBarry Smith   PetscBool   alwaysuseserver;                                      /* for debugging use the server infrastructure even if only one MPI rank is used for the solve */
25f1f2ae84SBarry Smith } PC_MPI;
26f1f2ae84SBarry Smith 
279371c9d4SSatish Balay typedef enum {
289371c9d4SSatish Balay   PCMPI_EXIT, /* exit the PC server loop, means the controlling sequential program is done */
29f1f2ae84SBarry Smith   PCMPI_CREATE,
30f1f2ae84SBarry Smith   PCMPI_SET_MAT,           /* set original matrix (or one with different nonzero pattern) */
31f1f2ae84SBarry Smith   PCMPI_UPDATE_MAT_VALUES, /* update current matrix with new nonzero values */
32f1f2ae84SBarry Smith   PCMPI_SOLVE,
33f1f2ae84SBarry Smith   PCMPI_VIEW,
34f1f2ae84SBarry Smith   PCMPI_DESTROY /* destroy a KSP that is no longer needed */
35f1f2ae84SBarry Smith } PCMPICommand;
36f1f2ae84SBarry Smith 
37f1f2ae84SBarry Smith static MPI_Comm  PCMPIComms[PC_MPI_MAX_RANKS];
38f1f2ae84SBarry Smith static PetscBool PCMPICommSet = PETSC_FALSE;
39f1f2ae84SBarry Smith static PetscInt  PCMPISolveCounts[PC_MPI_MAX_RANKS], PCMPIKSPCounts[PC_MPI_MAX_RANKS], PCMPIMatCounts[PC_MPI_MAX_RANKS], PCMPISolveCountsSeq = 0, PCMPIKSPCountsSeq = 0;
40f1f2ae84SBarry Smith 
419371c9d4SSatish Balay static PetscErrorCode PCMPICommsCreate(void) {
42f1f2ae84SBarry Smith   MPI_Comm    comm = PC_MPI_COMM_WORLD;
43f1f2ae84SBarry Smith   PetscMPIInt size, rank, i;
44f1f2ae84SBarry Smith 
45f1f2ae84SBarry Smith   PetscFunctionBegin;
46f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
47f1f2ae84SBarry Smith   PetscCheck(size <= PC_MPI_MAX_RANKS, PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for using more than PC_MPI_MAX_RANKS MPI ranks in an MPI linear solver server solve");
48f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(comm, &rank));
49f1f2ae84SBarry Smith   /* comm for size 1 is useful only for debugging */
50f1f2ae84SBarry Smith   for (i = 0; i < size; i++) {
51f1f2ae84SBarry Smith     PetscMPIInt color = rank < i + 1 ? 0 : MPI_UNDEFINED;
52f1f2ae84SBarry Smith     PetscCallMPI(MPI_Comm_split(comm, color, 0, &PCMPIComms[i]));
53f1f2ae84SBarry Smith     PCMPISolveCounts[i] = 0;
54f1f2ae84SBarry Smith     PCMPIKSPCounts[i]   = 0;
55f1f2ae84SBarry Smith   }
56f1f2ae84SBarry Smith   PCMPICommSet = PETSC_TRUE;
57f1f2ae84SBarry Smith   PetscFunctionReturn(0);
58f1f2ae84SBarry Smith }
59f1f2ae84SBarry Smith 
609371c9d4SSatish Balay PetscErrorCode PCMPICommsDestroy(void) {
61f1f2ae84SBarry Smith   MPI_Comm    comm = PC_MPI_COMM_WORLD;
62f1f2ae84SBarry Smith   PetscMPIInt size, rank, i;
63f1f2ae84SBarry Smith 
64f1f2ae84SBarry Smith   PetscFunctionBegin;
65f1f2ae84SBarry Smith   if (!PCMPICommSet) PetscFunctionReturn(0);
66f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
67f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(comm, &rank));
68f1f2ae84SBarry Smith   for (i = 0; i < size; i++) {
69f1f2ae84SBarry Smith     if (PCMPIComms[i] != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_free(&PCMPIComms[i]));
70f1f2ae84SBarry Smith   }
71f1f2ae84SBarry Smith   PCMPICommSet = PETSC_FALSE;
72f1f2ae84SBarry Smith   PetscFunctionReturn(0);
73f1f2ae84SBarry Smith }
74f1f2ae84SBarry Smith 
759371c9d4SSatish Balay static PetscErrorCode PCMPICreate(PC pc) {
76f1f2ae84SBarry Smith   PC_MPI     *km   = pc ? (PC_MPI *)pc->data : NULL;
77f1f2ae84SBarry Smith   MPI_Comm    comm = PC_MPI_COMM_WORLD;
78f1f2ae84SBarry Smith   KSP         ksp;
79f1f2ae84SBarry Smith   PetscInt    N[2], mincntperrank = 0;
80f1f2ae84SBarry Smith   PetscMPIInt size;
81f1f2ae84SBarry Smith   Mat         sA;
82f1f2ae84SBarry Smith   char       *prefix;
83f1f2ae84SBarry Smith   PetscMPIInt len = 0;
84f1f2ae84SBarry Smith 
85f1f2ae84SBarry Smith   PetscFunctionBegin;
86f1f2ae84SBarry Smith   if (!PCMPICommSet) PetscCall(PCMPICommsCreate());
87f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
88f1f2ae84SBarry Smith   if (pc) {
89f1f2ae84SBarry Smith     if (size == 1) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Warning: Running KSP type of MPI on a one rank MPI run, this will be less efficient then not using this type\n"));
90f1f2ae84SBarry Smith     PetscCall(PCGetOperators(pc, &sA, &sA));
91f1f2ae84SBarry Smith     PetscCall(MatGetSize(sA, &N[0], &N[1]));
92f1f2ae84SBarry Smith   }
93f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(N, 2, MPIU_INT, 0, comm));
94f1f2ae84SBarry Smith 
95f1f2ae84SBarry Smith   /* choose a suitable sized MPI_Comm for the problem to be solved on */
96f1f2ae84SBarry Smith   if (km) mincntperrank = km->mincntperrank;
97f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(&mincntperrank, 1, MPI_INT, 0, comm));
98f1f2ae84SBarry Smith   comm = PCMPIComms[PetscMin(size, PetscMax(1, N[0] / mincntperrank)) - 1];
99f1f2ae84SBarry Smith   if (comm == MPI_COMM_NULL) {
100f1f2ae84SBarry Smith     ksp = NULL;
101f1f2ae84SBarry Smith     PetscFunctionReturn(0);
102f1f2ae84SBarry Smith   }
103f1f2ae84SBarry Smith   PetscCall(KSPCreate(comm, &ksp));
104f1f2ae84SBarry Smith   PetscCallMPI(MPI_Gather(&ksp, 1, MPI_AINT, pc ? km->ksps : NULL, 1, MPI_AINT, 0, comm));
105f1f2ae84SBarry Smith   if (pc) {
106f1f2ae84SBarry Smith     size_t slen;
107f1f2ae84SBarry Smith 
108f1f2ae84SBarry Smith     PetscCallMPI(MPI_Comm_size(comm, &size));
109*dad3da8eSBarry Smith     PCMPIKSPCounts[size - 1]++;
110f1f2ae84SBarry Smith     PetscCall(PCGetOptionsPrefix(pc, (const char **)&prefix));
111f1f2ae84SBarry Smith     PetscCall(PetscStrlen(prefix, &slen));
112f1f2ae84SBarry Smith     len = (PetscMPIInt)slen;
113f1f2ae84SBarry Smith   }
114f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(&len, 1, MPI_INT, 0, comm));
115f1f2ae84SBarry Smith   if (len) {
11648a46eb9SPierre Jolivet     if (!pc) PetscCall(PetscMalloc1(len + 1, &prefix));
117f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(prefix, len + 1, MPI_CHAR, 0, comm));
118f1f2ae84SBarry Smith     PetscCall(KSPSetOptionsPrefix(ksp, prefix));
119f1f2ae84SBarry Smith   }
120f1f2ae84SBarry Smith   PetscCall(KSPAppendOptionsPrefix(ksp, "mpi_"));
121f1f2ae84SBarry Smith   PetscFunctionReturn(0);
122f1f2ae84SBarry Smith }
123f1f2ae84SBarry Smith 
1249371c9d4SSatish Balay static PetscErrorCode PCMPISetMat(PC pc) {
125f1f2ae84SBarry Smith   PC_MPI            *km = pc ? (PC_MPI *)pc->data : NULL;
126f1f2ae84SBarry Smith   Mat                A;
127f1f2ae84SBarry Smith   PetscInt           N[2], n, *ia, *ja, j;
128f1f2ae84SBarry Smith   Mat                sA;
129f1f2ae84SBarry Smith   MPI_Comm           comm = PC_MPI_COMM_WORLD;
130f1f2ae84SBarry Smith   KSP                ksp;
131f1f2ae84SBarry Smith   PetscLayout        layout;
132f1f2ae84SBarry Smith   const PetscInt    *IA = NULL, *JA = NULL;
133f1f2ae84SBarry Smith   const PetscInt    *range;
134f1f2ae84SBarry Smith   PetscMPIInt       *NZ = NULL, sendcounti[PC_MPI_MAX_RANKS], displi[PC_MPI_MAX_RANKS], *NZdispl = NULL, nz, size, i;
135f1f2ae84SBarry Smith   PetscScalar       *a;
136f1f2ae84SBarry Smith   const PetscScalar *sa = NULL;
137f1f2ae84SBarry Smith 
138f1f2ae84SBarry Smith   PetscFunctionBegin;
139f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : NULL, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
140f1f2ae84SBarry Smith   if (!ksp) PetscFunctionReturn(0);
141f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm));
142f1f2ae84SBarry Smith   if (pc) {
143f1f2ae84SBarry Smith     PetscCallMPI(MPI_Comm_size(comm, &size));
144*dad3da8eSBarry Smith     PCMPIMatCounts[size - 1]++;
145f1f2ae84SBarry Smith     PetscCall(PCGetOperators(pc, &sA, &sA));
146f1f2ae84SBarry Smith     PetscCall(MatGetSize(sA, &N[0], &N[1]));
147f1f2ae84SBarry Smith   }
148f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(N, 2, MPIU_INT, 0, comm));
149f1f2ae84SBarry Smith 
150f1f2ae84SBarry Smith   /* determine ownership ranges of matrix */
151f1f2ae84SBarry Smith   PetscCall(PetscLayoutCreate(comm, &layout));
152f1f2ae84SBarry Smith   PetscCall(PetscLayoutSetBlockSize(layout, 1));
153f1f2ae84SBarry Smith   PetscCall(PetscLayoutSetSize(layout, N[0]));
154f1f2ae84SBarry Smith   PetscCall(PetscLayoutSetUp(layout));
155f1f2ae84SBarry Smith   PetscCall(PetscLayoutGetLocalSize(layout, &n));
156f1f2ae84SBarry Smith 
157f1f2ae84SBarry Smith   /* copy over the matrix nonzero structure and values */
158f1f2ae84SBarry Smith   if (pc) {
159f1f2ae84SBarry Smith     NZ      = km->NZ;
160f1f2ae84SBarry Smith     NZdispl = km->NZdispl;
161f1f2ae84SBarry Smith     PetscCall(PetscLayoutGetRanges(layout, &range));
162f1f2ae84SBarry Smith     PetscCall(MatGetRowIJ(sA, 0, PETSC_FALSE, PETSC_FALSE, NULL, &IA, &JA, NULL));
163f1f2ae84SBarry Smith     for (i = 0; i < size; i++) {
164f1f2ae84SBarry Smith       sendcounti[i] = (PetscMPIInt)(1 + range[i + 1] - range[i]);
165f1f2ae84SBarry Smith       NZ[i]         = (PetscMPIInt)(IA[range[i + 1]] - IA[range[i]]);
166f1f2ae84SBarry Smith     }
167f1f2ae84SBarry Smith     displi[0]  = 0;
168f1f2ae84SBarry Smith     NZdispl[0] = 0;
169f1f2ae84SBarry Smith     for (j = 1; j < size; j++) {
170f1f2ae84SBarry Smith       displi[j]  = displi[j - 1] + sendcounti[j - 1] - 1;
171f1f2ae84SBarry Smith       NZdispl[j] = NZdispl[j - 1] + NZ[j - 1];
172f1f2ae84SBarry Smith     }
173f1f2ae84SBarry Smith     PetscCall(MatSeqAIJGetArrayRead(sA, &sa));
174f1f2ae84SBarry Smith   }
175f1f2ae84SBarry Smith   PetscCall(PetscLayoutDestroy(&layout));
176f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(NZ, 1, MPI_INT, &nz, 1, MPI_INT, 0, comm));
177f1f2ae84SBarry Smith 
178f1f2ae84SBarry Smith   PetscCall(PetscMalloc3(n + 1, &ia, nz, &ja, nz, &a));
179f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatterv(IA, sendcounti, displi, MPIU_INT, ia, n + 1, MPIU_INT, 0, comm));
180f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatterv(JA, NZ, NZdispl, MPIU_INT, ja, nz, MPIU_INT, 0, comm));
181f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatterv(sa, NZ, NZdispl, MPIU_SCALAR, a, nz, MPIU_SCALAR, 0, comm));
182f1f2ae84SBarry Smith 
183f1f2ae84SBarry Smith   if (pc) {
184f1f2ae84SBarry Smith     PetscCall(MatSeqAIJRestoreArrayRead(sA, &sa));
185f1f2ae84SBarry Smith     PetscCall(MatRestoreRowIJ(sA, 0, PETSC_FALSE, PETSC_FALSE, NULL, &IA, &JA, NULL));
186f1f2ae84SBarry Smith   }
187f1f2ae84SBarry Smith 
188ad540459SPierre Jolivet   for (j = 1; j < n + 1; j++) ia[j] -= ia[0];
189f1f2ae84SBarry Smith   ia[0] = 0;
190f1f2ae84SBarry Smith   PetscCall(MatCreateMPIAIJWithArrays(comm, n, n, N[0], N[0], ia, ja, a, &A));
191f1f2ae84SBarry Smith   PetscCall(MatSetOptionsPrefix(A, "mpi_"));
192f1f2ae84SBarry Smith 
193f1f2ae84SBarry Smith   PetscCall(PetscFree3(ia, ja, a));
194f1f2ae84SBarry Smith   PetscCall(KSPSetOperators(ksp, A, A));
195f1f2ae84SBarry Smith   if (!ksp->vec_sol) PetscCall(MatCreateVecs(A, &ksp->vec_sol, &ksp->vec_rhs));
196f1f2ae84SBarry Smith   if (pc) { /* needed for scatterv/gatherv of rhs and solution */
197f1f2ae84SBarry Smith     const PetscInt *range;
198f1f2ae84SBarry Smith 
199f1f2ae84SBarry Smith     PetscCall(VecGetOwnershipRanges(ksp->vec_sol, &range));
200f1f2ae84SBarry Smith     for (i = 0; i < size; i++) {
201f1f2ae84SBarry Smith       km->sendcount[i] = (PetscMPIInt)(range[i + 1] - range[i]);
202f1f2ae84SBarry Smith       km->displ[i]     = (PetscMPIInt)range[i];
203f1f2ae84SBarry Smith     }
204f1f2ae84SBarry Smith   }
205f1f2ae84SBarry Smith   PetscCall(MatDestroy(&A));
206f1f2ae84SBarry Smith   PetscCall(KSPSetFromOptions(ksp));
207f1f2ae84SBarry Smith   PetscFunctionReturn(0);
208f1f2ae84SBarry Smith }
209f1f2ae84SBarry Smith 
2109371c9d4SSatish Balay static PetscErrorCode PCMPIUpdateMatValues(PC pc) {
211f1f2ae84SBarry Smith   PC_MPI            *km = pc ? (PC_MPI *)pc->data : NULL;
212f1f2ae84SBarry Smith   KSP                ksp;
213f1f2ae84SBarry Smith   Mat                sA, A;
214f1f2ae84SBarry Smith   MPI_Comm           comm = PC_MPI_COMM_WORLD;
215f1f2ae84SBarry Smith   PetscScalar       *a;
216f1f2ae84SBarry Smith   PetscCount         nz;
217f1f2ae84SBarry Smith   const PetscScalar *sa = NULL;
218*dad3da8eSBarry Smith   PetscMPIInt        size;
219f1f2ae84SBarry Smith 
220f1f2ae84SBarry Smith   PetscFunctionBegin;
221f1f2ae84SBarry Smith   if (pc) {
222f1f2ae84SBarry Smith     PetscCall(PCGetOperators(pc, &sA, &sA));
223f1f2ae84SBarry Smith     PetscCall(MatSeqAIJGetArrayRead(sA, &sa));
224f1f2ae84SBarry Smith   }
225f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : NULL, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
226f1f2ae84SBarry Smith   if (!ksp) PetscFunctionReturn(0);
227f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm));
228*dad3da8eSBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
229*dad3da8eSBarry Smith   PCMPIMatCounts[size - 1]++;
230f1f2ae84SBarry Smith   PetscCall(KSPGetOperators(ksp, NULL, &A));
231f1f2ae84SBarry Smith   PetscCall(MatMPIAIJGetNumberNonzeros(A, &nz));
232f1f2ae84SBarry Smith   PetscCall(PetscMalloc1(nz, &a));
233f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatterv(sa, pc ? km->NZ : NULL, pc ? km->NZdispl : NULL, MPIU_SCALAR, a, nz, MPIU_SCALAR, 0, comm));
234f1f2ae84SBarry Smith   if (pc) PetscCall(MatSeqAIJRestoreArrayRead(sA, &sa));
235f1f2ae84SBarry Smith   PetscCall(MatUpdateMPIAIJWithArray(A, a));
236f1f2ae84SBarry Smith   PetscCall(PetscFree(a));
237f1f2ae84SBarry Smith   PetscFunctionReturn(0);
238f1f2ae84SBarry Smith }
239f1f2ae84SBarry Smith 
2409371c9d4SSatish Balay static PetscErrorCode PCMPISolve(PC pc, Vec B, Vec X) {
241f1f2ae84SBarry Smith   PC_MPI            *km = pc ? (PC_MPI *)pc->data : NULL;
242f1f2ae84SBarry Smith   KSP                ksp;
243f1f2ae84SBarry Smith   MPI_Comm           comm = PC_MPI_COMM_WORLD;
244f1f2ae84SBarry Smith   const PetscScalar *sb   = NULL, *x;
245f1f2ae84SBarry Smith   PetscScalar       *b, *sx = NULL;
246f1f2ae84SBarry Smith   PetscInt           n;
247f1f2ae84SBarry Smith 
248f1f2ae84SBarry Smith   PetscFunctionBegin;
249f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : &ksp, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
250f1f2ae84SBarry Smith   if (!ksp) PetscFunctionReturn(0);
251f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm));
252f1f2ae84SBarry Smith 
253f1f2ae84SBarry Smith   /* TODO: optimize code to not require building counts/displ everytime */
254f1f2ae84SBarry Smith 
255f1f2ae84SBarry Smith   /* scatterv rhs */
256f1f2ae84SBarry Smith   if (pc) {
257f1f2ae84SBarry Smith     PetscMPIInt size;
258f1f2ae84SBarry Smith 
259*dad3da8eSBarry Smith     PetscCallMPI(MPI_Comm_size(comm, &size));
260*dad3da8eSBarry Smith     PCMPISolveCounts[size - 1]++;
261f1f2ae84SBarry Smith     PetscCall(VecGetArrayRead(B, &sb));
262f1f2ae84SBarry Smith   }
263f1f2ae84SBarry Smith   PetscCall(VecGetLocalSize(ksp->vec_rhs, &n));
264f1f2ae84SBarry Smith   PetscCall(VecGetArray(ksp->vec_rhs, &b));
265f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatterv(sb, pc ? km->sendcount : NULL, pc ? km->displ : NULL, MPIU_SCALAR, b, n, MPIU_SCALAR, 0, comm));
266f1f2ae84SBarry Smith   PetscCall(VecRestoreArray(ksp->vec_rhs, &b));
267f1f2ae84SBarry Smith   if (pc) PetscCall(VecRestoreArrayRead(B, &sb));
268f1f2ae84SBarry Smith 
269f1f2ae84SBarry Smith   PetscCall(KSPSolve(ksp, NULL, NULL));
270f1f2ae84SBarry Smith 
271f1f2ae84SBarry Smith   /* gather solution */
272f1f2ae84SBarry Smith   PetscCall(VecGetArrayRead(ksp->vec_sol, &x));
273f1f2ae84SBarry Smith   if (pc) PetscCall(VecGetArray(X, &sx));
274f1f2ae84SBarry Smith   PetscCallMPI(MPI_Gatherv(x, n, MPIU_SCALAR, sx, pc ? km->sendcount : NULL, pc ? km->displ : NULL, MPIU_SCALAR, 0, comm));
275f1f2ae84SBarry Smith   if (pc) PetscCall(VecRestoreArray(X, &sx));
276f1f2ae84SBarry Smith   PetscCall(VecRestoreArrayRead(ksp->vec_sol, &x));
277f1f2ae84SBarry Smith   PetscFunctionReturn(0);
278f1f2ae84SBarry Smith }
279f1f2ae84SBarry Smith 
2809371c9d4SSatish Balay static PetscErrorCode PCMPIDestroy(PC pc) {
281f1f2ae84SBarry Smith   PC_MPI  *km = pc ? (PC_MPI *)pc->data : NULL;
282f1f2ae84SBarry Smith   KSP      ksp;
283f1f2ae84SBarry Smith   MPI_Comm comm = PC_MPI_COMM_WORLD;
284f1f2ae84SBarry Smith 
285f1f2ae84SBarry Smith   PetscFunctionBegin;
286f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : NULL, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
287f1f2ae84SBarry Smith   if (!ksp) PetscFunctionReturn(0);
288f1f2ae84SBarry Smith   PetscCall(KSPDestroy(&ksp));
289f1f2ae84SBarry Smith   PetscFunctionReturn(0);
290f1f2ae84SBarry Smith }
291f1f2ae84SBarry Smith 
292f1f2ae84SBarry Smith /*@C
293f1f2ae84SBarry Smith      PCMPIServerBegin - starts a server that runs on the rank != 0 MPI processes waiting to process requests for
294f1f2ae84SBarry Smith      parallel KSP solves and management of parallel KSP objects.
295f1f2ae84SBarry Smith 
296f1f2ae84SBarry Smith      Logically collective on all MPI ranks except 0
297f1f2ae84SBarry Smith 
298f1f2ae84SBarry Smith    Options Database:
299f1f2ae84SBarry Smith +   -mpi_linear_solver_server - causes the PETSc program to start in MPI linear solver server mode where only the first MPI rank runs user code
300f1f2ae84SBarry Smith -   -mpi_linear_solver_server_view - displays information about the linear systems solved by the MPI linear solver server
301f1f2ae84SBarry Smith 
302f1f2ae84SBarry Smith      Notes:
303f1f2ae84SBarry Smith       This is normally started automatically in `PetscInitialize()` when the option is provided
304f1f2ae84SBarry Smith 
305f1f2ae84SBarry Smith      Developer Notes:
306f1f2ae84SBarry Smith        When called on rank zero this sets PETSC_COMM_WORLD to PETSC_COMM_SELF to allow a main program
307f1f2ae84SBarry Smith        written with PETSC_COMM_WORLD to run correctly on the single rank while all the ranks
308f1f2ae84SBarry Smith        (that would normally be sharing PETSC_COMM_WORLD) to run the solver server.
309f1f2ae84SBarry Smith 
310f1f2ae84SBarry Smith        Can this be integrated into the PetscDevice abstraction that is currently being developed?
311f1f2ae84SBarry Smith 
312f1f2ae84SBarry Smith      Level: developer
313f1f2ae84SBarry Smith 
314f1f2ae84SBarry Smith .seealso: PCMPIServerEnd(), PCMPI
315f1f2ae84SBarry Smith @*/
3169371c9d4SSatish Balay PetscErrorCode PCMPIServerBegin(void) {
317f1f2ae84SBarry Smith   PetscMPIInt rank;
318f1f2ae84SBarry Smith 
319f1f2ae84SBarry Smith   PetscFunctionBegin;
320f1f2ae84SBarry Smith   PetscCall(PetscInfo(NULL, "Starting MPI Linear Solver Server"));
321f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(PC_MPI_COMM_WORLD, &rank));
322f1f2ae84SBarry Smith   if (rank == 0) {
323f1f2ae84SBarry Smith     PETSC_COMM_WORLD = PETSC_COMM_SELF;
324f1f2ae84SBarry Smith     PetscFunctionReturn(0);
325f1f2ae84SBarry Smith   }
326f1f2ae84SBarry Smith 
327f1f2ae84SBarry Smith   while (PETSC_TRUE) {
328f1f2ae84SBarry Smith     PCMPICommand request = PCMPI_CREATE;
329f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, PC_MPI_COMM_WORLD));
330f1f2ae84SBarry Smith     switch (request) {
3319371c9d4SSatish Balay     case PCMPI_CREATE: PetscCall(PCMPICreate(NULL)); break;
3329371c9d4SSatish Balay     case PCMPI_SET_MAT: PetscCall(PCMPISetMat(NULL)); break;
3339371c9d4SSatish Balay     case PCMPI_UPDATE_MAT_VALUES: PetscCall(PCMPIUpdateMatValues(NULL)); break;
334f1f2ae84SBarry Smith     case PCMPI_VIEW:
335f1f2ae84SBarry Smith       // PetscCall(PCMPIView(NULL));
336f1f2ae84SBarry Smith       break;
3379371c9d4SSatish Balay     case PCMPI_SOLVE: PetscCall(PCMPISolve(NULL, NULL, NULL)); break;
3389371c9d4SSatish Balay     case PCMPI_DESTROY: PetscCall(PCMPIDestroy(NULL)); break;
339f1f2ae84SBarry Smith     case PCMPI_EXIT:
340f1f2ae84SBarry Smith       PetscCall(PetscFinalize());
341f1f2ae84SBarry Smith       exit(0); /* not sure if this is a good idea, but cannot return because it will run users main program */
342f1f2ae84SBarry Smith       break;
3439371c9d4SSatish Balay     default: break;
344f1f2ae84SBarry Smith     }
345f1f2ae84SBarry Smith   }
346f1f2ae84SBarry Smith   PetscFunctionReturn(0);
347f1f2ae84SBarry Smith }
348f1f2ae84SBarry Smith 
349f1f2ae84SBarry Smith /*@C
350f1f2ae84SBarry Smith      PCMPIServerEnd - ends a server that runs on the rank != 0 MPI processes waiting to process requests for
351f1f2ae84SBarry Smith      parallel KSP solves and management of parallel KSP objects.
352f1f2ae84SBarry Smith 
353f1f2ae84SBarry Smith      Logically collective on all MPI ranks except 0
354f1f2ae84SBarry Smith 
355f1f2ae84SBarry Smith      Notes:
356f1f2ae84SBarry Smith       This is normally ended automatically in `PetscFinalize()` when the option is provided
357f1f2ae84SBarry Smith 
358f1f2ae84SBarry Smith      Level: developer
359f1f2ae84SBarry Smith 
360f1f2ae84SBarry Smith .seealso: PCMPIServerBegin(), PCMPI
361f1f2ae84SBarry Smith @*/
3629371c9d4SSatish Balay PetscErrorCode PCMPIServerEnd(void) {
363f1f2ae84SBarry Smith   PCMPICommand request = PCMPI_EXIT;
364f1f2ae84SBarry Smith 
365f1f2ae84SBarry Smith   PetscFunctionBegin;
366f1f2ae84SBarry Smith   if (PetscGlobalRank == 0) {
367f1f2ae84SBarry Smith     PetscViewer       viewer = NULL;
368f1f2ae84SBarry Smith     PetscViewerFormat format;
369f1f2ae84SBarry Smith 
370f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, PC_MPI_COMM_WORLD));
371f1f2ae84SBarry Smith     PETSC_COMM_WORLD = MPI_COMM_WORLD; /* could use PC_MPI_COMM_WORLD */
372f1f2ae84SBarry Smith     PetscOptionsBegin(PETSC_COMM_SELF, NULL, "MPI linear solver server options", NULL);
373f1f2ae84SBarry Smith     PetscCall(PetscOptionsViewer("-mpi_linear_solver_server_view", "View information about system solved with the server", "PCMPI", &viewer, &format, NULL));
374f1f2ae84SBarry Smith     PetscOptionsEnd();
375f1f2ae84SBarry Smith     if (viewer) {
376f1f2ae84SBarry Smith       PetscBool isascii;
377f1f2ae84SBarry Smith 
378f1f2ae84SBarry Smith       PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
379f1f2ae84SBarry Smith       if (isascii) {
380f1f2ae84SBarry Smith         PetscMPIInt size;
381f1f2ae84SBarry Smith         PetscInt    i, ksp = 0, mat = 0, solve = 0;
382f1f2ae84SBarry Smith 
383f1f2ae84SBarry Smith         PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
384*dad3da8eSBarry Smith         for (i = 0; i < size; i++) {
385f1f2ae84SBarry Smith           ksp += PCMPIKSPCounts[i];
386f1f2ae84SBarry Smith           mat += PCMPIMatCounts[i];
387f1f2ae84SBarry Smith           solve += PCMPISolveCounts[i];
388f1f2ae84SBarry Smith         }
389f1f2ae84SBarry Smith         PetscCall(PetscViewerASCIIPrintf(viewer, "MPI linear solver server:\n"));
390f1f2ae84SBarry Smith         PetscCall(PetscViewerASCIIPrintf(viewer, "  Parallel KSPs  %" PetscInt_FMT " Mats  %" PetscInt_FMT " Solves %" PetscInt_FMT "\n", ksp, mat, solve));
391f1f2ae84SBarry Smith         PetscCall(PetscViewerASCIIPrintf(viewer, "  Sequential KSPs  %" PetscInt_FMT " Solves %" PetscInt_FMT "\n", PCMPIKSPCountsSeq, PCMPISolveCountsSeq));
392f1f2ae84SBarry Smith       }
393f1f2ae84SBarry Smith       PetscCall(PetscViewerDestroy(&viewer));
394f1f2ae84SBarry Smith     }
395f1f2ae84SBarry Smith   }
396f1f2ae84SBarry Smith   PetscCall(PCMPICommsDestroy());
397f1f2ae84SBarry Smith   PetscFunctionReturn(0);
398f1f2ae84SBarry Smith }
399f1f2ae84SBarry Smith 
400f1f2ae84SBarry Smith /*
401f1f2ae84SBarry Smith     This version is used in the trivial case when the MPI parallel solver server is running on just the original MPI rank 0
402f1f2ae84SBarry Smith     because, for example, the problem is small. This version is more efficient because it does not require copying any data
403f1f2ae84SBarry Smith */
4049371c9d4SSatish Balay static PetscErrorCode PCSetUp_Seq(PC pc) {
405f1f2ae84SBarry Smith   PC_MPI     *km = (PC_MPI *)pc->data;
406f1f2ae84SBarry Smith   Mat         sA;
407f1f2ae84SBarry Smith   const char *prefix;
408f1f2ae84SBarry Smith 
409f1f2ae84SBarry Smith   PetscFunctionBegin;
410f1f2ae84SBarry Smith   PetscCall(PCGetOperators(pc, NULL, &sA));
411f1f2ae84SBarry Smith   PetscCall(PCGetOptionsPrefix(pc, &prefix));
412f1f2ae84SBarry Smith   PetscCall(KSPCreate(PETSC_COMM_SELF, &km->ksps[0]));
413f1f2ae84SBarry Smith   PetscCall(KSPSetOptionsPrefix(km->ksps[0], prefix));
414f1f2ae84SBarry Smith   PetscCall(KSPAppendOptionsPrefix(km->ksps[0], "mpi_"));
415f1f2ae84SBarry Smith   PetscCall(KSPSetOperators(km->ksps[0], sA, sA));
416f1f2ae84SBarry Smith   PetscCall(KSPSetFromOptions(km->ksps[0]));
417f1f2ae84SBarry Smith   PetscCall(KSPSetUp(km->ksps[0]));
418f1f2ae84SBarry Smith   PetscInfo((PetscObject)pc, "MPI parallel linear solver system is being solved directly on rank 0 due to its small size\n");
419f1f2ae84SBarry Smith   PCMPIKSPCountsSeq++;
420f1f2ae84SBarry Smith   PetscFunctionReturn(0);
421f1f2ae84SBarry Smith }
422f1f2ae84SBarry Smith 
4239371c9d4SSatish Balay static PetscErrorCode PCApply_Seq(PC pc, Vec b, Vec x) {
424f1f2ae84SBarry Smith   PC_MPI *km = (PC_MPI *)pc->data;
425f1f2ae84SBarry Smith 
426f1f2ae84SBarry Smith   PetscFunctionBegin;
427f1f2ae84SBarry Smith   PetscCall(KSPSolve(km->ksps[0], b, x));
428f1f2ae84SBarry Smith   PCMPISolveCountsSeq++;
429f1f2ae84SBarry Smith   PetscFunctionReturn(0);
430f1f2ae84SBarry Smith }
431f1f2ae84SBarry Smith 
4329371c9d4SSatish Balay static PetscErrorCode PCView_Seq(PC pc, PetscViewer viewer) {
433f1f2ae84SBarry Smith   PC_MPI     *km = (PC_MPI *)pc->data;
434f1f2ae84SBarry Smith   const char *prefix;
435f1f2ae84SBarry Smith 
436f1f2ae84SBarry Smith   PetscFunctionBegin;
437f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Running MPI linear solver server directly on rank 0 due to its small size\n"));
438f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Desired minimum number of nonzeros per rank for MPI parallel solve %d\n", (int)km->mincntperrank));
439f1f2ae84SBarry Smith   PetscCall(PCGetOptionsPrefix(pc, &prefix));
440f1f2ae84SBarry Smith   if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "*** Use -%smpi_ksp_view to see the MPI KSP parameters***\n", prefix));
441f1f2ae84SBarry Smith   else PetscCall(PetscViewerASCIIPrintf(viewer, "*** Use -mpi_ksp_view to see the MPI KSP parameters***\n"));
442f1f2ae84SBarry Smith   PetscFunctionReturn(0);
443f1f2ae84SBarry Smith }
444f1f2ae84SBarry Smith 
4459371c9d4SSatish Balay static PetscErrorCode PCDestroy_Seq(PC pc) {
446f1f2ae84SBarry Smith   PC_MPI *km = (PC_MPI *)pc->data;
447f1f2ae84SBarry Smith 
448f1f2ae84SBarry Smith   PetscFunctionBegin;
449f1f2ae84SBarry Smith   PetscCall(KSPDestroy(&km->ksps[0]));
450f1f2ae84SBarry Smith   PetscCall(PetscFree(pc->data));
451f1f2ae84SBarry Smith   PetscFunctionReturn(0);
452f1f2ae84SBarry Smith }
453f1f2ae84SBarry Smith 
454f1f2ae84SBarry Smith /*
455f1f2ae84SBarry Smith      PCSetUp_MPI - Trigger the creation of the MPI parallel PC and copy parts of the matrix and
456f1f2ae84SBarry Smith      right hand side to the parallel PC
457f1f2ae84SBarry Smith */
4589371c9d4SSatish Balay static PetscErrorCode PCSetUp_MPI(PC pc) {
459f1f2ae84SBarry Smith   PC_MPI      *km = (PC_MPI *)pc->data;
460f1f2ae84SBarry Smith   PetscMPIInt  rank, size;
461f1f2ae84SBarry Smith   PCMPICommand request;
462f1f2ae84SBarry Smith   PetscBool    newmatrix = PETSC_FALSE;
463f1f2ae84SBarry Smith 
464f1f2ae84SBarry Smith   PetscFunctionBegin;
465f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));
466f1f2ae84SBarry Smith   PetscCheck(rank == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PCMPI can only be used from 0th rank of MPI_COMM_WORLD. Perhaps a missing -mpi_linear_solver_server?");
467f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(MPI_COMM_WORLD, &size));
468f1f2ae84SBarry Smith 
469f1f2ae84SBarry Smith   if (!pc->setupcalled) {
470f1f2ae84SBarry Smith     if (!km->alwaysuseserver) {
471f1f2ae84SBarry Smith       PetscInt n;
472f1f2ae84SBarry Smith       Mat      sA;
473f1f2ae84SBarry Smith       /* short circuit for small systems */
474f1f2ae84SBarry Smith       PetscCall(PCGetOperators(pc, &sA, &sA));
475f1f2ae84SBarry Smith       PetscCall(MatGetSize(sA, &n, NULL));
476f1f2ae84SBarry Smith       if (n < 2 * km->mincntperrank - 1 || size == 1) {
477f1f2ae84SBarry Smith         pc->ops->setup   = NULL;
478f1f2ae84SBarry Smith         pc->ops->apply   = PCApply_Seq;
479f1f2ae84SBarry Smith         pc->ops->destroy = PCDestroy_Seq;
480f1f2ae84SBarry Smith         pc->ops->view    = PCView_Seq;
481f1f2ae84SBarry Smith         PetscCall(PCSetUp_Seq(pc));
482f1f2ae84SBarry Smith         PetscFunctionReturn(0);
483f1f2ae84SBarry Smith       }
484f1f2ae84SBarry Smith     }
485f1f2ae84SBarry Smith 
486f1f2ae84SBarry Smith     request = PCMPI_CREATE;
487f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, MPI_COMM_WORLD));
488f1f2ae84SBarry Smith     PetscCall(PCMPICreate(pc));
489f1f2ae84SBarry Smith     newmatrix = PETSC_TRUE;
4909371c9d4SSatish Balay   }
4919371c9d4SSatish Balay   if (pc->flag == DIFFERENT_NONZERO_PATTERN) newmatrix = PETSC_TRUE;
492f1f2ae84SBarry Smith 
493f1f2ae84SBarry Smith   if (newmatrix) {
494f1f2ae84SBarry Smith     PetscInfo((PetscObject)pc, "New matrix or matrix has changed nonzero structure\n");
495f1f2ae84SBarry Smith     request = PCMPI_SET_MAT;
496f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, MPI_COMM_WORLD));
497f1f2ae84SBarry Smith     PetscCall(PCMPISetMat(pc));
498f1f2ae84SBarry Smith   } else {
499f1f2ae84SBarry Smith     PetscInfo((PetscObject)pc, "Matrix has only changed nozero values\n");
500f1f2ae84SBarry Smith     request = PCMPI_UPDATE_MAT_VALUES;
501f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, MPI_COMM_WORLD));
502f1f2ae84SBarry Smith     PetscCall(PCMPIUpdateMatValues(pc));
503f1f2ae84SBarry Smith   }
504f1f2ae84SBarry Smith   PetscFunctionReturn(0);
505f1f2ae84SBarry Smith }
506f1f2ae84SBarry Smith 
5079371c9d4SSatish Balay static PetscErrorCode PCApply_MPI(PC pc, Vec b, Vec x) {
508f1f2ae84SBarry Smith   PCMPICommand request = PCMPI_SOLVE;
509f1f2ae84SBarry Smith 
510f1f2ae84SBarry Smith   PetscFunctionBegin;
511f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, MPI_COMM_WORLD));
512f1f2ae84SBarry Smith   PetscCall(PCMPISolve(pc, b, x));
513f1f2ae84SBarry Smith   PetscFunctionReturn(0);
514f1f2ae84SBarry Smith }
515f1f2ae84SBarry Smith 
5169371c9d4SSatish Balay PetscErrorCode PCDestroy_MPI(PC pc) {
517f1f2ae84SBarry Smith   PCMPICommand request = PCMPI_DESTROY;
518f1f2ae84SBarry Smith 
519f1f2ae84SBarry Smith   PetscFunctionBegin;
520f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, MPI_COMM_WORLD));
521f1f2ae84SBarry Smith   PetscCall(PCMPIDestroy(pc));
522f1f2ae84SBarry Smith   PetscCall(PetscFree(pc->data));
523f1f2ae84SBarry Smith   PetscFunctionReturn(0);
524f1f2ae84SBarry Smith }
525f1f2ae84SBarry Smith 
526f1f2ae84SBarry Smith /*
527f1f2ae84SBarry Smith      PCView_MPI - Cannot call view on the MPI parallel KSP because other ranks do not have access to the viewer
528f1f2ae84SBarry Smith */
5299371c9d4SSatish Balay PetscErrorCode PCView_MPI(PC pc, PetscViewer viewer) {
530f1f2ae84SBarry Smith   PC_MPI     *km = (PC_MPI *)pc->data;
531f1f2ae84SBarry Smith   MPI_Comm    comm;
532f1f2ae84SBarry Smith   PetscMPIInt size;
533f1f2ae84SBarry Smith   const char *prefix;
534f1f2ae84SBarry Smith 
535f1f2ae84SBarry Smith   PetscFunctionBegin;
536f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)km->ksps[0], &comm));
537f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
538f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Size of MPI communicator used for MPI parallel KSP solve %d\n", size));
539f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Desired minimum number of nonzeros per rank for MPI parallel solve %d\n", (int)km->mincntperrank));
540f1f2ae84SBarry Smith   PetscCall(PCGetOptionsPrefix(pc, &prefix));
541f1f2ae84SBarry Smith   if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "*** Use -%smpi_ksp_view to see the MPI KSP parameters***\n", prefix));
542f1f2ae84SBarry Smith   else PetscCall(PetscViewerASCIIPrintf(viewer, "*** Use -mpi_ksp_view to see the MPI KSP parameters***\n"));
543f1f2ae84SBarry Smith   PetscFunctionReturn(0);
544f1f2ae84SBarry Smith }
545f1f2ae84SBarry Smith 
5469371c9d4SSatish Balay PetscErrorCode PCSetFromOptions_MPI(PC pc, PetscOptionItems *PetscOptionsObject) {
547f1f2ae84SBarry Smith   PC_MPI *km = (PC_MPI *)pc->data;
548f1f2ae84SBarry Smith 
549f1f2ae84SBarry Smith   PetscFunctionBegin;
550f1f2ae84SBarry Smith   PetscOptionsHeadBegin(PetscOptionsObject, "MPI linear solver server options");
551f1f2ae84SBarry Smith   PetscCall(PetscOptionsInt("-pc_mpi_minimum_count_per_rank", "Desired minimum number of nonzeros per rank", "None", km->mincntperrank, &km->mincntperrank, NULL));
552f1f2ae84SBarry Smith   PetscCall(PetscOptionsBool("-pc_mpi_always_use_server", "Use the server even if only one rank is used for the solve (for debugging)", "None", km->alwaysuseserver, &km->alwaysuseserver, NULL));
553f1f2ae84SBarry Smith   PetscOptionsHeadEnd();
554f1f2ae84SBarry Smith   PetscFunctionReturn(0);
555f1f2ae84SBarry Smith }
556f1f2ae84SBarry Smith 
557f1f2ae84SBarry Smith /*MC
558f1f2ae84SBarry Smith      PCMPI - Calls an MPI parallel KSP to solve a linear system from user code running on one process
559f1f2ae84SBarry Smith 
560f1f2ae84SBarry Smith    Level: beginner
561f1f2ae84SBarry Smith 
562f1f2ae84SBarry Smith    Options Database:
563f1f2ae84SBarry Smith +  -mpi_linear_solver_server - causes the PETSc program to start in MPI linear solver server mode where only the first MPI rank runs user code
564f1f2ae84SBarry Smith .  -mpi_linear_solver_server_view - displays information about the linear systems solved by the MPI linear solver server
565f1f2ae84SBarry Smith .  -pc_mpi_minimum_count_per_rank - sets the minimum size of the linear system per MPI rank that the solver will strive for
566f1f2ae84SBarry Smith -  -pc_mpi_always_use_server - use the server solver code even if the particular system is only solved on the process, this option is only for debugging and testing purposes
567f1f2ae84SBarry Smith 
568f1f2ae84SBarry Smith    Notes:
569f1f2ae84SBarry Smith    The options database prefix for the MPI parallel KSP and PC is -mpi_
570f1f2ae84SBarry Smith 
571f1f2ae84SBarry Smith    The MPI linear solver server will not support scaling user code to utilize extremely large numbers of MPI ranks but should give reasonable speedup for
572f1f2ae84SBarry Smith    potentially 4 to 8 MPI ranks depending on the linear system being solved, solver algorithm, and the hardware.
573f1f2ae84SBarry Smith 
574f1f2ae84SBarry Smith    It can be particularly useful for user OpenMP code or potentially user GPU code.
575f1f2ae84SBarry Smith 
576f1f2ae84SBarry Smith    When the program is running with a single MPI rank then this directly uses the provided matrix and right hand side (still in a KSP with the options prefix of -mpi)
577f1f2ae84SBarry Smith    and does not need to distribute the matrix and vector to the various MPI ranks; thus it incurs no extra overhead over just using the KSP directly.
578f1f2ae84SBarry Smith 
579f1f2ae84SBarry Smith .seealso: `KSPCreate()`, `KSPSetType()`, `KSPType`, `KSP`, `PC`, `PCMPIServerBegin()`, `PCMPIServerEnd()`
580f1f2ae84SBarry Smith 
581f1f2ae84SBarry Smith M*/
5829371c9d4SSatish Balay PETSC_EXTERN PetscErrorCode PCCreate_MPI(PC pc) {
583f1f2ae84SBarry Smith   PC_MPI *km;
584f1f2ae84SBarry Smith 
585f1f2ae84SBarry Smith   PetscFunctionBegin;
586f1f2ae84SBarry Smith   PetscCall(PetscNewLog(pc, &km));
587f1f2ae84SBarry Smith   pc->data = (void *)km;
588f1f2ae84SBarry Smith 
589f1f2ae84SBarry Smith   km->mincntperrank = 10000;
590f1f2ae84SBarry Smith 
591f1f2ae84SBarry Smith   pc->ops->setup          = PCSetUp_MPI;
592f1f2ae84SBarry Smith   pc->ops->apply          = PCApply_MPI;
593f1f2ae84SBarry Smith   pc->ops->destroy        = PCDestroy_MPI;
594f1f2ae84SBarry Smith   pc->ops->view           = PCView_MPI;
595f1f2ae84SBarry Smith   pc->ops->setfromoptions = PCSetFromOptions_MPI;
596f1f2ae84SBarry Smith   PetscFunctionReturn(0);
597f1f2ae84SBarry Smith }
598