xref: /petsc/src/ksp/pc/impls/mpi/pcmpi.c (revision 49abdd8a111d9c2ef7fc48ade253ef64e07f9b37)
1f1f2ae84SBarry Smith /*
2f1f2ae84SBarry Smith     This file creates an MPI parallel KSP from a sequential PC that lives on MPI rank 0.
3f1f2ae84SBarry Smith     It is intended to allow using PETSc MPI parallel linear solvers from non-MPI codes.
4f1f2ae84SBarry Smith 
5dd8e379bSPierre Jolivet     That program may use OpenMP to compute the right-hand side and matrix for the linear system
6f1f2ae84SBarry Smith 
7f1f2ae84SBarry Smith     The code uses MPI_COMM_WORLD below, but perhaps it should use PETSC_COMM_WORLD
8f1f2ae84SBarry Smith 
9f1f2ae84SBarry Smith     The resulting KSP and PC can only be controlled via the options database, though some common commands
10f1f2ae84SBarry Smith     could be passed through the server.
11f1f2ae84SBarry Smith 
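    Example usage (a sketch; the exact options depend on the application): an otherwise sequential PETSc
    program can be launched on several MPI processes with the server enabled from the command line, e.g.

       mpiexec -n 4 ./myapp -mpi_linear_solver_server -mpi_linear_solver_server_view

    where myapp (a hypothetical application) assembles its matrix and right-hand side only on the first MPI
    rank; the remaining ranks wait inside PCMPIServerBegin() and participate only in the parallel solves.
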
12f1f2ae84SBarry Smith */
139f0612e4SBarry Smith #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
14f1f2ae84SBarry Smith #include <petsc/private/kspimpl.h>
15789afff4SPierre Jolivet #include <petscts.h>
16789afff4SPierre Jolivet #include <petsctao.h>
179f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
189f0612e4SBarry Smith   #include <pthread.h>
199f0612e4SBarry Smith #endif
20f1f2ae84SBarry Smith 
21f1f2ae84SBarry Smith #define PC_MPI_MAX_RANKS  256
22f1f2ae84SBarry Smith #define PC_MPI_COMM_WORLD MPI_COMM_WORLD
23f1f2ae84SBarry Smith 
24f1f2ae84SBarry Smith typedef struct {
259f0612e4SBarry Smith   KSP         ksps[PC_MPI_MAX_RANKS];                               /* The addresses of the MPI parallel KSP on each process, NULL on processes where the KSP does not live. */
26f1f2ae84SBarry Smith   PetscMPIInt sendcount[PC_MPI_MAX_RANKS], displ[PC_MPI_MAX_RANKS]; /* For scatter/gather of rhs/solution */
27f1f2ae84SBarry Smith   PetscMPIInt NZ[PC_MPI_MAX_RANKS], NZdispl[PC_MPI_MAX_RANKS];      /* For scatter of nonzero values in matrix (and nonzero column indices initially) */
289f0612e4SBarry Smith   PetscInt    mincntperrank;                                        /* minimum number of desired matrix rows per active rank in MPI parallel KSP solve */
299f0612e4SBarry Smith   PetscBool   alwaysuseserver;                                      /* for debugging use the server infrastructure even if only one MPI process is used for the solve */
30f1f2ae84SBarry Smith } PC_MPI;
31f1f2ae84SBarry Smith 
329371c9d4SSatish Balay typedef enum {
339371c9d4SSatish Balay   PCMPI_EXIT, /* exit the PC server loop; the controlling sequential program is done */
34f1f2ae84SBarry Smith   PCMPI_CREATE,
35f1f2ae84SBarry Smith   PCMPI_SET_MAT,           /* set original matrix (or one with different nonzero pattern) */
36f1f2ae84SBarry Smith   PCMPI_UPDATE_MAT_VALUES, /* update current matrix with new nonzero values */
37f1f2ae84SBarry Smith   PCMPI_SOLVE,
38f1f2ae84SBarry Smith   PCMPI_VIEW,
399f0612e4SBarry Smith   PCMPI_DESTROY /* destroy a PC that is no longer needed */
40f1f2ae84SBarry Smith } PCMPICommand;
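
/*
   Request protocol (a sketch inferred from the routines below): rank 0 broadcasts a PCMPICommand and then
   calls the matching routine with its PC, while every other rank receives the command inside
   PCMPIServerBegin() and calls the same routine with a NULL PC, for example

     PetscCall(PCMPIServerBroadcastRequest(PCMPI_SET_MAT));  // rank 0, e.g. from PCSetUp_MPI()
     PetscCall(PCMPISetMat(pc));                             // rank 0
     // meanwhile each server rank executes PetscCall(PCMPISetMat(NULL)) from its server loop
*/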
41f1f2ae84SBarry Smith 
42f1f2ae84SBarry Smith static MPI_Comm      PCMPIComms[PC_MPI_MAX_RANKS];
43f1f2ae84SBarry Smith static PetscBool     PCMPICommSet = PETSC_FALSE;
44f1f2ae84SBarry Smith static PetscInt      PCMPISolveCounts[PC_MPI_MAX_RANKS], PCMPIKSPCounts[PC_MPI_MAX_RANKS], PCMPIMatCounts[PC_MPI_MAX_RANKS], PCMPISolveCountsSeq = 0, PCMPIKSPCountsSeq = 0;
455316cbedSBarry Smith static PetscInt      PCMPIIterations[PC_MPI_MAX_RANKS], PCMPISizes[PC_MPI_MAX_RANKS], PCMPIIterationsSeq = 0, PCMPISizesSeq = 0;
469f0612e4SBarry Smith static PetscLogEvent EventServerDist, EventServerDistMPI;
479f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
489f0612e4SBarry Smith static pthread_mutex_t *PCMPIServerLocks;
499f0612e4SBarry Smith #else
509f0612e4SBarry Smith static void *PCMPIServerLocks;
519f0612e4SBarry Smith #endif
52f1f2ae84SBarry Smith 
53d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCMPICommsCreate(void)
54d71ae5a4SJacob Faibussowitsch {
55f1f2ae84SBarry Smith   MPI_Comm    comm = PC_MPI_COMM_WORLD;
56f1f2ae84SBarry Smith   PetscMPIInt size, rank, i;
57f1f2ae84SBarry Smith 
58f1f2ae84SBarry Smith   PetscFunctionBegin;
59f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
60f1f2ae84SBarry Smith   PetscCheck(size <= PC_MPI_MAX_RANKS, PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for using more than PC_MPI_MAX_RANKS MPI ranks in an MPI linear solver server solve");
61f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(comm, &rank));
62f1f2ae84SBarry Smith   /* comm for size 1 is useful only for debugging */
63f1f2ae84SBarry Smith   for (i = 0; i < size; i++) {
64f1f2ae84SBarry Smith     PetscMPIInt color = rank < i + 1 ? 0 : MPI_UNDEFINED;
65f1f2ae84SBarry Smith     PetscCallMPI(MPI_Comm_split(comm, color, 0, &PCMPIComms[i]));
66f1f2ae84SBarry Smith     PCMPISolveCounts[i] = 0;
67f1f2ae84SBarry Smith     PCMPIKSPCounts[i]   = 0;
685316cbedSBarry Smith     PCMPIIterations[i]  = 0;
695316cbedSBarry Smith     PCMPISizes[i]       = 0;
70f1f2ae84SBarry Smith   }
71f1f2ae84SBarry Smith   PCMPICommSet = PETSC_TRUE;
723ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
73f1f2ae84SBarry Smith }
74f1f2ae84SBarry Smith 
759f0612e4SBarry Smith static PetscErrorCode PCMPICommsDestroy(void)
76d71ae5a4SJacob Faibussowitsch {
77f1f2ae84SBarry Smith   MPI_Comm    comm = PC_MPI_COMM_WORLD;
78f1f2ae84SBarry Smith   PetscMPIInt size, rank, i;
79f1f2ae84SBarry Smith 
80f1f2ae84SBarry Smith   PetscFunctionBegin;
813ba16761SJacob Faibussowitsch   if (!PCMPICommSet) PetscFunctionReturn(PETSC_SUCCESS);
82f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
83f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(comm, &rank));
84f1f2ae84SBarry Smith   for (i = 0; i < size; i++) {
85f1f2ae84SBarry Smith     if (PCMPIComms[i] != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_free(&PCMPIComms[i]));
86f1f2ae84SBarry Smith   }
87f1f2ae84SBarry Smith   PCMPICommSet = PETSC_FALSE;
883ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
89f1f2ae84SBarry Smith }
90f1f2ae84SBarry Smith 
91d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCMPICreate(PC pc)
92d71ae5a4SJacob Faibussowitsch {
93f1f2ae84SBarry Smith   PC_MPI     *km   = pc ? (PC_MPI *)pc->data : NULL;
94f1f2ae84SBarry Smith   MPI_Comm    comm = PC_MPI_COMM_WORLD;
95f1f2ae84SBarry Smith   KSP         ksp;
96f1f2ae84SBarry Smith   PetscInt    N[2], mincntperrank = 0;
97f1f2ae84SBarry Smith   PetscMPIInt size;
98f1f2ae84SBarry Smith   Mat         sA;
993821be0aSBarry Smith   char       *cprefix = NULL;
100f1f2ae84SBarry Smith   PetscMPIInt len     = 0;
101f1f2ae84SBarry Smith 
102f1f2ae84SBarry Smith   PetscFunctionBegin;
1039f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
104f1f2ae84SBarry Smith   if (!PCMPICommSet) PetscCall(PCMPICommsCreate());
105f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
106f1f2ae84SBarry Smith   if (pc) {
107f1f2ae84SBarry Smith     if (size == 1) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Warning: Running a KSP of type MPI on a one-rank MPI run; this will be less efficient than not using this type\n"));
108f1f2ae84SBarry Smith     PetscCall(PCGetOperators(pc, &sA, &sA));
109f1f2ae84SBarry Smith     PetscCall(MatGetSize(sA, &N[0], &N[1]));
110f1f2ae84SBarry Smith   }
111f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(N, 2, MPIU_INT, 0, comm));
112f1f2ae84SBarry Smith 
113f1f2ae84SBarry Smith   /* choose a suitably sized MPI_Comm for the problem to be solved on */
114f1f2ae84SBarry Smith   if (km) mincntperrank = km->mincntperrank;
115f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(&mincntperrank, 1, MPI_INT, 0, comm));
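  /* the number of participating ranks is roughly the number of matrix rows divided by mincntperrank, capped at the communicator size; PCMPIComms[k - 1] contains ranks 0..k-1 */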
116f1f2ae84SBarry Smith   comm = PCMPIComms[PetscMin(size, PetscMax(1, N[0] / mincntperrank)) - 1];
117f1f2ae84SBarry Smith   if (comm == MPI_COMM_NULL) {
118f1f2ae84SBarry Smith     ksp                = NULL;
1199f0612e4SBarry Smith     PCMPIServerInSolve = PETSC_FALSE;
1203ba16761SJacob Faibussowitsch     PetscFunctionReturn(PETSC_SUCCESS);
121f1f2ae84SBarry Smith   }
1220316ec64SBarry Smith   PetscCall(PetscLogStagePush(PCMPIStage));
123f1f2ae84SBarry Smith   PetscCall(KSPCreate(comm, &ksp));
1243821be0aSBarry Smith   PetscCall(KSPSetNestLevel(ksp, 1));
1253821be0aSBarry Smith   PetscCall(PetscObjectSetTabLevel((PetscObject)ksp, 1));
1260316ec64SBarry Smith   PetscCall(PetscLogStagePop());
127f1f2ae84SBarry Smith   PetscCallMPI(MPI_Gather(&ksp, 1, MPI_AINT, pc ? km->ksps : NULL, 1, MPI_AINT, 0, comm));
128f1f2ae84SBarry Smith   if (pc) {
129f1f2ae84SBarry Smith     size_t      slen;
1303821be0aSBarry Smith     const char *prefix = NULL;
131f9818f3cSJose E. Roman     char       *found  = NULL;
132f1f2ae84SBarry Smith 
133f1f2ae84SBarry Smith     PetscCallMPI(MPI_Comm_size(comm, &size));
134dad3da8eSBarry Smith     PCMPIKSPCounts[size - 1]++;
1353821be0aSBarry Smith     /* Created KSP gets prefix of PC minus the mpi_linear_solver_server_ portion */
1363821be0aSBarry Smith     PetscCall(PCGetOptionsPrefix(pc, &prefix));
1373821be0aSBarry Smith     PetscCheck(prefix, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCMPI missing required prefix");
1383821be0aSBarry Smith     PetscCall(PetscStrallocpy(prefix, &cprefix));
1393821be0aSBarry Smith     PetscCall(PetscStrstr(cprefix, "mpi_linear_solver_server_", &found));
1403821be0aSBarry Smith     PetscCheck(found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCMPI missing mpi_linear_solver_server_ portion of prefix");
1413821be0aSBarry Smith     *found = 0;
1423821be0aSBarry Smith     PetscCall(PetscStrlen(cprefix, &slen));
143f1f2ae84SBarry Smith     len = (PetscMPIInt)slen;
144f1f2ae84SBarry Smith   }
145f1f2ae84SBarry Smith   PetscCallMPI(MPI_Bcast(&len, 1, MPI_INT, 0, comm));
146f1f2ae84SBarry Smith   if (len) {
1473821be0aSBarry Smith     if (!pc) PetscCall(PetscMalloc1(len + 1, &cprefix));
1483821be0aSBarry Smith     PetscCallMPI(MPI_Bcast(cprefix, len + 1, MPI_CHAR, 0, comm));
1493821be0aSBarry Smith     PetscCall(KSPSetOptionsPrefix(ksp, cprefix));
150f1f2ae84SBarry Smith   }
1513821be0aSBarry Smith   PetscCall(PetscFree(cprefix));
1529f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
1533ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
154f1f2ae84SBarry Smith }
155f1f2ae84SBarry Smith 
156d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCMPISetMat(PC pc)
157d71ae5a4SJacob Faibussowitsch {
158f1f2ae84SBarry Smith   PC_MPI            *km = pc ? (PC_MPI *)pc->data : NULL;
159f1f2ae84SBarry Smith   Mat                A;
1609f0612e4SBarry Smith   PetscInt           m, n, j, bs;
161f1f2ae84SBarry Smith   Mat                sA;
162f1f2ae84SBarry Smith   MPI_Comm           comm = PC_MPI_COMM_WORLD;
163f1f2ae84SBarry Smith   KSP                ksp;
164f1f2ae84SBarry Smith   PetscLayout        layout;
1659f0612e4SBarry Smith   const PetscInt    *IA = NULL, *JA = NULL, *ia, *ja;
166f1f2ae84SBarry Smith   const PetscInt    *range;
167f1f2ae84SBarry Smith   PetscMPIInt       *NZ = NULL, sendcounti[PC_MPI_MAX_RANKS], displi[PC_MPI_MAX_RANKS], *NZdispl = NULL, nz, size, i;
1689f0612e4SBarry Smith   const PetscScalar *a                = NULL, *sa;
1699f0612e4SBarry Smith   PetscInt           matproperties[8] = {0}, rstart, rend;
1703821be0aSBarry Smith   char              *cprefix;
171f1f2ae84SBarry Smith 
172f1f2ae84SBarry Smith   PetscFunctionBegin;
173f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : NULL, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
1743ba16761SJacob Faibussowitsch   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1759f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
17645682376SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDist, NULL, NULL, NULL, NULL));
177f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm));
178f1f2ae84SBarry Smith   if (pc) {
17968a21331SBarry Smith     PetscBool   isset, issymmetric, ishermitian, isspd, isstructurallysymmetric;
1803821be0aSBarry Smith     const char *prefix;
1813821be0aSBarry Smith     size_t      clen;
18268a21331SBarry Smith 
183f1f2ae84SBarry Smith     PetscCallMPI(MPI_Comm_size(comm, &size));
184dad3da8eSBarry Smith     PCMPIMatCounts[size - 1]++;
185f1f2ae84SBarry Smith     PetscCall(PCGetOperators(pc, &sA, &sA));
18668a21331SBarry Smith     PetscCall(MatGetSize(sA, &matproperties[0], &matproperties[1]));
187dd0d27b1SBarry Smith     PetscCall(MatGetBlockSize(sA, &bs));
18868a21331SBarry Smith     matproperties[2] = bs;
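    /* encode each known property as 0 = not known, 1 = known true, 2 = known false so it can be broadcast as part of the integer array */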
18968a21331SBarry Smith     PetscCall(MatIsSymmetricKnown(sA, &isset, &issymmetric));
19068a21331SBarry Smith     matproperties[3] = !isset ? 0 : (issymmetric ? 1 : 2);
19168a21331SBarry Smith     PetscCall(MatIsHermitianKnown(sA, &isset, &ishermitian));
19268a21331SBarry Smith     matproperties[4] = !isset ? 0 : (ishermitian ? 1 : 2);
19368a21331SBarry Smith     PetscCall(MatIsSPDKnown(sA, &isset, &isspd));
19468a21331SBarry Smith     matproperties[5] = !isset ? 0 : (isspd ? 1 : 2);
19568a21331SBarry Smith     PetscCall(MatIsStructurallySymmetricKnown(sA, &isset, &isstructurallysymmetric));
19668a21331SBarry Smith     matproperties[6] = !isset ? 0 : (isstructurallysymmetric ? 1 : 2);
1977a99bfcaSBarry Smith     /* Created Mat gets prefix of input Mat PLUS the mpi_linear_solver_server_ portion */
1983821be0aSBarry Smith     PetscCall(MatGetOptionsPrefix(sA, &prefix));
1993821be0aSBarry Smith     PetscCall(PetscStrallocpy(prefix, &cprefix));
2003821be0aSBarry Smith     PetscCall(PetscStrlen(cprefix, &clen));
2013821be0aSBarry Smith     matproperties[7] = (PetscInt)clen;
202f1f2ae84SBarry Smith   }
2033821be0aSBarry Smith   PetscCallMPI(MPI_Bcast(matproperties, PETSC_STATIC_ARRAY_LENGTH(matproperties), MPIU_INT, 0, comm));
204f1f2ae84SBarry Smith 
20568a21331SBarry Smith   /* determine ownership ranges of matrix columns */
206f1f2ae84SBarry Smith   PetscCall(PetscLayoutCreate(comm, &layout));
20768a21331SBarry Smith   PetscCall(PetscLayoutSetBlockSize(layout, matproperties[2]));
20868a21331SBarry Smith   PetscCall(PetscLayoutSetSize(layout, matproperties[1]));
209f1f2ae84SBarry Smith   PetscCall(PetscLayoutSetUp(layout));
210f1f2ae84SBarry Smith   PetscCall(PetscLayoutGetLocalSize(layout, &n));
21168a21331SBarry Smith   PetscCall(PetscLayoutDestroy(&layout));
21268a21331SBarry Smith 
21368a21331SBarry Smith   /* determine ownership ranges of matrix rows */
21468a21331SBarry Smith   PetscCall(PetscLayoutCreate(comm, &layout));
21568a21331SBarry Smith   PetscCall(PetscLayoutSetBlockSize(layout, matproperties[2]));
21668a21331SBarry Smith   PetscCall(PetscLayoutSetSize(layout, matproperties[0]));
21768a21331SBarry Smith   PetscCall(PetscLayoutSetUp(layout));
21868a21331SBarry Smith   PetscCall(PetscLayoutGetLocalSize(layout, &m));
2199f0612e4SBarry Smith   PetscCall(PetscLayoutGetRange(layout, &rstart, &rend));
220f1f2ae84SBarry Smith 
2219f0612e4SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDistMPI, NULL, NULL, NULL, NULL));
222f1f2ae84SBarry Smith   /* copy over the matrix nonzero structure and values */
223f1f2ae84SBarry Smith   if (pc) {
2249f0612e4SBarry Smith     PetscCall(MatGetRowIJ(sA, 0, PETSC_FALSE, PETSC_FALSE, NULL, &IA, &JA, NULL));
2259f0612e4SBarry Smith     if (!PCMPIServerUseShmget) {
226f1f2ae84SBarry Smith       NZ      = km->NZ;
227f1f2ae84SBarry Smith       NZdispl = km->NZdispl;
228f1f2ae84SBarry Smith       PetscCall(PetscLayoutGetRanges(layout, &range));
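      /* each rank receives n + 1 entries of the row-pointer array IA, so consecutive chunks overlap by one entry; hence the + 1 in the counts and the - 1 in the displacements */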
229f1f2ae84SBarry Smith       for (i = 0; i < size; i++) {
230f1f2ae84SBarry Smith         sendcounti[i] = (PetscMPIInt)(1 + range[i + 1] - range[i]);
231f1f2ae84SBarry Smith         NZ[i]         = (PetscMPIInt)(IA[range[i + 1]] - IA[range[i]]);
232f1f2ae84SBarry Smith       }
233f1f2ae84SBarry Smith       displi[0]  = 0;
234f1f2ae84SBarry Smith       NZdispl[0] = 0;
235f1f2ae84SBarry Smith       for (j = 1; j < size; j++) {
236f1f2ae84SBarry Smith         displi[j]  = displi[j - 1] + sendcounti[j - 1] - 1;
237f1f2ae84SBarry Smith         NZdispl[j] = NZdispl[j - 1] + NZ[j - 1];
238f1f2ae84SBarry Smith       }
2399f0612e4SBarry Smith     }
240f1f2ae84SBarry Smith     PetscCall(MatSeqAIJGetArrayRead(sA, &sa));
241f1f2ae84SBarry Smith   }
242f1f2ae84SBarry Smith   PetscCall(PetscLayoutDestroy(&layout));
243f1f2ae84SBarry Smith 
2443821be0aSBarry Smith   PetscCall(MatCreate(comm, &A));
2453821be0aSBarry Smith   if (matproperties[7] > 0) {
2463821be0aSBarry Smith     if (!pc) PetscCall(PetscMalloc1(matproperties[7] + 1, &cprefix));
2476497c311SBarry Smith     PetscCallMPI(MPI_Bcast(cprefix, (PetscMPIInt)(matproperties[7] + 1), MPI_CHAR, 0, comm));
2483821be0aSBarry Smith     PetscCall(MatSetOptionsPrefix(A, cprefix));
2493821be0aSBarry Smith     PetscCall(PetscFree(cprefix));
2503821be0aSBarry Smith   }
2513821be0aSBarry Smith   PetscCall(MatAppendOptionsPrefix(A, "mpi_linear_solver_server_"));
2523821be0aSBarry Smith   PetscCall(MatSetSizes(A, m, n, matproperties[0], matproperties[1]));
2533821be0aSBarry Smith   PetscCall(MatSetType(A, MATMPIAIJ));
2549f0612e4SBarry Smith 
2559f0612e4SBarry Smith   if (!PCMPIServerUseShmget) {
2566497c311SBarry Smith     PetscMPIInt in;
2579f0612e4SBarry Smith     PetscCallMPI(MPI_Scatter(NZ, 1, MPI_INT, &nz, 1, MPI_INT, 0, comm));
2589f0612e4SBarry Smith     PetscCall(PetscMalloc3(n + 1, &ia, nz, &ja, nz, &a));
2596497c311SBarry Smith     PetscCall(PetscMPIIntCast(n, &in));
2606497c311SBarry Smith     PetscCallMPI(MPI_Scatterv(IA, sendcounti, displi, MPIU_INT, (void *)ia, in + 1, MPIU_INT, 0, comm));
2619f0612e4SBarry Smith     PetscCallMPI(MPI_Scatterv(JA, NZ, NZdispl, MPIU_INT, (void *)ja, nz, MPIU_INT, 0, comm));
2629f0612e4SBarry Smith     PetscCallMPI(MPI_Scatterv(sa, NZ, NZdispl, MPIU_SCALAR, (void *)a, nz, MPIU_SCALAR, 0, comm));
2639f0612e4SBarry Smith   } else {
2649f0612e4SBarry Smith     const void           *addr[3] = {(const void **)IA, (const void **)JA, (const void **)sa};
2659f0612e4SBarry Smith     PCMPIServerAddresses *addresses;
2669f0612e4SBarry Smith 
2679f0612e4SBarry Smith     PetscCall(PetscNew(&addresses));
2689f0612e4SBarry Smith     addresses->n = 3;
2699f0612e4SBarry Smith     PetscCall(PetscShmgetMapAddresses(comm, addresses->n, addr, addresses->addr));
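    /* index directly into the shared CSR arrays: the row pointers start at this rank's first row, and ia[0] (the global offset of this rank's first nonzero) locates its slice of the column indices and values */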
2709f0612e4SBarry Smith     ia = rstart + (PetscInt *)addresses->addr[0];
2719f0612e4SBarry Smith     ja = ia[0] + (PetscInt *)addresses->addr[1];
2729f0612e4SBarry Smith     a  = ia[0] + (PetscScalar *)addresses->addr[2];
273*49abdd8aSBarry Smith     PetscCall(PetscObjectContainerCompose((PetscObject)A, "PCMPIServerAddresses", (void *)addresses, PCMPIServerAddressesDestroy));
2749f0612e4SBarry Smith   }
2759f0612e4SBarry Smith 
2769f0612e4SBarry Smith   if (pc) {
2779f0612e4SBarry Smith     PetscCall(MatSeqAIJRestoreArrayRead(sA, &sa));
2789f0612e4SBarry Smith     PetscCall(MatRestoreRowIJ(sA, 0, PETSC_FALSE, PETSC_FALSE, NULL, &IA, &JA, NULL));
2799f0612e4SBarry Smith   }
2809f0612e4SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDistMPI, NULL, NULL, NULL, NULL));
2819f0612e4SBarry Smith 
2829f0612e4SBarry Smith   PetscCall(PetscLogStagePush(PCMPIStage));
2833821be0aSBarry Smith   PetscCall(MatMPIAIJSetPreallocationCSR(A, ia, ja, a));
28468a21331SBarry Smith   PetscCall(MatSetBlockSize(A, matproperties[2]));
2853821be0aSBarry Smith 
28668a21331SBarry Smith   if (matproperties[3]) PetscCall(MatSetOption(A, MAT_SYMMETRIC, matproperties[3] == 1 ? PETSC_TRUE : PETSC_FALSE));
28768a21331SBarry Smith   if (matproperties[4]) PetscCall(MatSetOption(A, MAT_HERMITIAN, matproperties[4] == 1 ? PETSC_TRUE : PETSC_FALSE));
28868a21331SBarry Smith   if (matproperties[5]) PetscCall(MatSetOption(A, MAT_SPD, matproperties[5] == 1 ? PETSC_TRUE : PETSC_FALSE));
28968a21331SBarry Smith   if (matproperties[6]) PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, matproperties[6] == 1 ? PETSC_TRUE : PETSC_FALSE));
290f1f2ae84SBarry Smith 
2919f0612e4SBarry Smith   if (!PCMPIServerUseShmget) PetscCall(PetscFree3(ia, ja, a));
292f1f2ae84SBarry Smith   PetscCall(KSPSetOperators(ksp, A, A));
293f1f2ae84SBarry Smith   if (!ksp->vec_sol) PetscCall(MatCreateVecs(A, &ksp->vec_sol, &ksp->vec_rhs));
2940316ec64SBarry Smith   PetscCall(PetscLogStagePop());
2959f0612e4SBarry Smith   if (pc && !PCMPIServerUseShmget) { /* needed for scatterv/gatherv of rhs and solution */
296f1f2ae84SBarry Smith     const PetscInt *range;
297f1f2ae84SBarry Smith 
298f1f2ae84SBarry Smith     PetscCall(VecGetOwnershipRanges(ksp->vec_sol, &range));
299f1f2ae84SBarry Smith     for (i = 0; i < size; i++) {
300f1f2ae84SBarry Smith       km->sendcount[i] = (PetscMPIInt)(range[i + 1] - range[i]);
301f1f2ae84SBarry Smith       km->displ[i]     = (PetscMPIInt)range[i];
302f1f2ae84SBarry Smith     }
303f1f2ae84SBarry Smith   }
304f1f2ae84SBarry Smith   PetscCall(MatDestroy(&A));
30545682376SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDist, NULL, NULL, NULL, NULL));
306f1f2ae84SBarry Smith   PetscCall(KSPSetFromOptions(ksp));
3079f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
3083ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
309f1f2ae84SBarry Smith }
310f1f2ae84SBarry Smith 
311d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCMPIUpdateMatValues(PC pc)
312d71ae5a4SJacob Faibussowitsch {
313f1f2ae84SBarry Smith   PC_MPI            *km = pc ? (PC_MPI *)pc->data : NULL;
314f1f2ae84SBarry Smith   KSP                ksp;
315f1f2ae84SBarry Smith   Mat                sA, A;
316f1f2ae84SBarry Smith   MPI_Comm           comm = PC_MPI_COMM_WORLD;
3179f0612e4SBarry Smith   const PetscInt    *ia, *IA;
3189f0612e4SBarry Smith   const PetscScalar *a;
319f1f2ae84SBarry Smith   PetscCount         nz;
320f1f2ae84SBarry Smith   const PetscScalar *sa = NULL;
321dad3da8eSBarry Smith   PetscMPIInt        size;
3229f0612e4SBarry Smith   PetscInt           rstart, matproperties[4] = {0, 0, 0, 0};
323f1f2ae84SBarry Smith 
324f1f2ae84SBarry Smith   PetscFunctionBegin;
325f1f2ae84SBarry Smith   if (pc) {
326f1f2ae84SBarry Smith     PetscCall(PCGetOperators(pc, &sA, &sA));
327f1f2ae84SBarry Smith     PetscCall(MatSeqAIJGetArrayRead(sA, &sa));
3289f0612e4SBarry Smith     PetscCall(MatGetRowIJ(sA, 0, PETSC_FALSE, PETSC_FALSE, NULL, &IA, NULL, NULL));
329f1f2ae84SBarry Smith   }
330f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : NULL, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
3313ba16761SJacob Faibussowitsch   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
3329f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
33345682376SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDist, NULL, NULL, NULL, NULL));
334f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm));
335dad3da8eSBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
336dad3da8eSBarry Smith   PCMPIMatCounts[size - 1]++;
337f1f2ae84SBarry Smith   PetscCall(KSPGetOperators(ksp, NULL, &A));
3389f0612e4SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDistMPI, NULL, NULL, NULL, NULL));
3399f0612e4SBarry Smith   if (!PCMPIServerUseShmget) {
3406497c311SBarry Smith     PetscMPIInt mpi_nz;
3416497c311SBarry Smith 
342f1f2ae84SBarry Smith     PetscCall(MatMPIAIJGetNumberNonzeros(A, &nz));
3436497c311SBarry Smith     PetscCall(PetscMPIIntCast(nz, &mpi_nz));
344f1f2ae84SBarry Smith     PetscCall(PetscMalloc1(nz, &a));
3456497c311SBarry Smith     PetscCallMPI(MPI_Scatterv(sa, pc ? km->NZ : NULL, pc ? km->NZdispl : NULL, MPIU_SCALAR, (void *)a, mpi_nz, MPIU_SCALAR, 0, comm));
3469f0612e4SBarry Smith   } else {
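    /* the shared-memory addresses were attached to A by PCMPISetMat(), so only the numerical values need to be picked up in place */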
3479f0612e4SBarry Smith     PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
3489f0612e4SBarry Smith     PCMPIServerAddresses *addresses;
3499f0612e4SBarry Smith     PetscCall(PetscObjectContainerQuery((PetscObject)A, "PCMPIServerAddresses", (void **)&addresses));
3509f0612e4SBarry Smith     ia = rstart + (PetscInt *)addresses->addr[0];
3519f0612e4SBarry Smith     a  = ia[0] + (PetscScalar *)addresses->addr[2];
3529f0612e4SBarry Smith   }
3539f0612e4SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDistMPI, NULL, NULL, NULL, NULL));
35468a21331SBarry Smith   if (pc) {
35568a21331SBarry Smith     PetscBool isset, issymmetric, ishermitian, isspd, isstructurallysymmetric;
35668a21331SBarry Smith 
35768a21331SBarry Smith     PetscCall(MatSeqAIJRestoreArrayRead(sA, &sa));
3589f0612e4SBarry Smith     PetscCall(MatRestoreRowIJ(sA, 0, PETSC_FALSE, PETSC_FALSE, NULL, &IA, NULL, NULL));
35968a21331SBarry Smith 
36068a21331SBarry Smith     PetscCall(MatIsSymmetricKnown(sA, &isset, &issymmetric));
36168a21331SBarry Smith     matproperties[0] = !isset ? 0 : (issymmetric ? 1 : 2);
36268a21331SBarry Smith     PetscCall(MatIsHermitianKnown(sA, &isset, &ishermitian));
36368a21331SBarry Smith     matproperties[1] = !isset ? 0 : (ishermitian ? 1 : 2);
36468a21331SBarry Smith     PetscCall(MatIsSPDKnown(sA, &isset, &isspd));
36568a21331SBarry Smith     matproperties[2] = !isset ? 0 : (isspd ? 1 : 2);
36668a21331SBarry Smith     PetscCall(MatIsStructurallySymmetricKnown(sA, &isset, &isstructurallysymmetric));
36768a21331SBarry Smith     matproperties[3] = !isset ? 0 : (isstructurallysymmetric ? 1 : 2);
36868a21331SBarry Smith   }
369f1f2ae84SBarry Smith   PetscCall(MatUpdateMPIAIJWithArray(A, a));
3709f0612e4SBarry Smith   if (!PCMPIServerUseShmget) PetscCall(PetscFree(a));
37168a21331SBarry Smith   PetscCallMPI(MPI_Bcast(matproperties, 4, MPIU_INT, 0, comm));
37268a21331SBarry Smith   /* if any of these properties was previously set and is now not set, this will result in incorrect properties in A, since there is no way to unset a property */
37368a21331SBarry Smith   if (matproperties[0]) PetscCall(MatSetOption(A, MAT_SYMMETRIC, matproperties[0] == 1 ? PETSC_TRUE : PETSC_FALSE));
37468a21331SBarry Smith   if (matproperties[1]) PetscCall(MatSetOption(A, MAT_HERMITIAN, matproperties[1] == 1 ? PETSC_TRUE : PETSC_FALSE));
37568a21331SBarry Smith   if (matproperties[2]) PetscCall(MatSetOption(A, MAT_SPD, matproperties[2] == 1 ? PETSC_TRUE : PETSC_FALSE));
37668a21331SBarry Smith   if (matproperties[3]) PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, matproperties[3] == 1 ? PETSC_TRUE : PETSC_FALSE));
37745682376SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDist, NULL, NULL, NULL, NULL));
3789f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
3793ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
380f1f2ae84SBarry Smith }
381f1f2ae84SBarry Smith 
382d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCMPISolve(PC pc, Vec B, Vec X)
383d71ae5a4SJacob Faibussowitsch {
384f1f2ae84SBarry Smith   PC_MPI            *km = pc ? (PC_MPI *)pc->data : NULL;
385f1f2ae84SBarry Smith   KSP                ksp;
386f1f2ae84SBarry Smith   MPI_Comm           comm = PC_MPI_COMM_WORLD;
387f1f2ae84SBarry Smith   const PetscScalar *sb   = NULL, *x;
388f1f2ae84SBarry Smith   PetscScalar       *b, *sx = NULL;
3895316cbedSBarry Smith   PetscInt           its, n;
3905316cbedSBarry Smith   PetscMPIInt        size;
3919f0612e4SBarry Smith   void              *addr[2];
392f1f2ae84SBarry Smith 
393f1f2ae84SBarry Smith   PetscFunctionBegin;
394f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : &ksp, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
3953ba16761SJacob Faibussowitsch   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
3969f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
39745682376SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDist, NULL, NULL, NULL, NULL));
398f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm));
399f1f2ae84SBarry Smith 
400f1f2ae84SBarry Smith   /* scatterv rhs */
401dad3da8eSBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
4025316cbedSBarry Smith   if (pc) {
4035316cbedSBarry Smith     PetscInt N;
4045316cbedSBarry Smith 
405dad3da8eSBarry Smith     PCMPISolveCounts[size - 1]++;
40668a21331SBarry Smith     PetscCall(MatGetSize(pc->pmat, &N, NULL));
4075316cbedSBarry Smith     PCMPISizes[size - 1] += N;
408f1f2ae84SBarry Smith   }
409f1f2ae84SBarry Smith   PetscCall(VecGetLocalSize(ksp->vec_rhs, &n));
4109f0612e4SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDistMPI, NULL, NULL, NULL, NULL));
4119f0612e4SBarry Smith   if (!PCMPIServerUseShmget) {
4126497c311SBarry Smith     PetscMPIInt in;
4136497c311SBarry Smith 
414f1f2ae84SBarry Smith     PetscCall(VecGetArray(ksp->vec_rhs, &b));
4159f0612e4SBarry Smith     if (pc) PetscCall(VecGetArrayRead(B, &sb));
4166497c311SBarry Smith     PetscCall(PetscMPIIntCast(n, &in));
4176497c311SBarry Smith     PetscCallMPI(MPI_Scatterv(sb, pc ? km->sendcount : NULL, pc ? km->displ : NULL, MPIU_SCALAR, b, in, MPIU_SCALAR, 0, comm));
418f1f2ae84SBarry Smith     if (pc) PetscCall(VecRestoreArrayRead(B, &sb));
4199f0612e4SBarry Smith     PetscCall(VecRestoreArray(ksp->vec_rhs, &b));
4209f0612e4SBarry Smith     // TODO: scatter initial guess if needed
4219f0612e4SBarry Smith   } else {
4229f0612e4SBarry Smith     PetscInt rstart;
4239f0612e4SBarry Smith 
4249f0612e4SBarry Smith     if (pc) PetscCall(VecGetArrayRead(B, &sb));
4259f0612e4SBarry Smith     if (pc) PetscCall(VecGetArray(X, &sx));
4269f0612e4SBarry Smith     const void *inaddr[2] = {(const void **)sb, (const void **)sx};
4279f0612e4SBarry Smith     if (pc) PetscCall(VecRestoreArray(X, &sx));
4289f0612e4SBarry Smith     if (pc) PetscCall(VecRestoreArrayRead(B, &sb));
4299f0612e4SBarry Smith 
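    /* map the shared-memory right-hand side and solution arrays and use them in place, so no MPI scatter or gather of vector entries is needed */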
4309f0612e4SBarry Smith     PetscCall(PetscShmgetMapAddresses(comm, 2, inaddr, addr));
4319f0612e4SBarry Smith     PetscCall(VecGetOwnershipRange(ksp->vec_rhs, &rstart, NULL));
4329f0612e4SBarry Smith     PetscCall(VecPlaceArray(ksp->vec_rhs, rstart + (PetscScalar *)addr[0]));
4339f0612e4SBarry Smith     PetscCall(VecPlaceArray(ksp->vec_sol, rstart + (PetscScalar *)addr[1]));
4349f0612e4SBarry Smith   }
4359f0612e4SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDistMPI, NULL, NULL, NULL, NULL));
436f1f2ae84SBarry Smith 
43745682376SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDist, NULL, NULL, NULL, NULL));
4380316ec64SBarry Smith   PetscCall(PetscLogStagePush(PCMPIStage));
439f1f2ae84SBarry Smith   PetscCall(KSPSolve(ksp, NULL, NULL));
4400316ec64SBarry Smith   PetscCall(PetscLogStagePop());
44145682376SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDist, NULL, NULL, NULL, NULL));
4425316cbedSBarry Smith   PetscCall(KSPGetIterationNumber(ksp, &its));
4435316cbedSBarry Smith   PCMPIIterations[size - 1] += its;
4449f0612e4SBarry Smith   // TODO: send iterations up to outer KSP
4459f0612e4SBarry Smith 
4469f0612e4SBarry Smith   if (PCMPIServerUseShmget) PetscCall(PetscShmgetUnmapAddresses(2, addr));
447f1f2ae84SBarry Smith 
448f1f2ae84SBarry Smith   /* gather solution */
4499f0612e4SBarry Smith   PetscCall(PetscLogEventBegin(EventServerDistMPI, NULL, NULL, NULL, NULL));
4509f0612e4SBarry Smith   if (!PCMPIServerUseShmget) {
4516497c311SBarry Smith     PetscMPIInt in;
4526497c311SBarry Smith 
453f1f2ae84SBarry Smith     PetscCall(VecGetArrayRead(ksp->vec_sol, &x));
454f1f2ae84SBarry Smith     if (pc) PetscCall(VecGetArray(X, &sx));
4556497c311SBarry Smith     PetscCall(PetscMPIIntCast(n, &in));
4566497c311SBarry Smith     PetscCallMPI(MPI_Gatherv(x, in, MPIU_SCALAR, sx, pc ? km->sendcount : NULL, pc ? km->displ : NULL, MPIU_SCALAR, 0, comm));
457f1f2ae84SBarry Smith     if (pc) PetscCall(VecRestoreArray(X, &sx));
458f1f2ae84SBarry Smith     PetscCall(VecRestoreArrayRead(ksp->vec_sol, &x));
4599f0612e4SBarry Smith   } else {
4609f0612e4SBarry Smith     PetscCall(VecResetArray(ksp->vec_rhs));
4619f0612e4SBarry Smith     PetscCall(VecResetArray(ksp->vec_sol));
4629f0612e4SBarry Smith   }
4639f0612e4SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDistMPI, NULL, NULL, NULL, NULL));
46445682376SBarry Smith   PetscCall(PetscLogEventEnd(EventServerDist, NULL, NULL, NULL, NULL));
4659f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
4663ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
467f1f2ae84SBarry Smith }
468f1f2ae84SBarry Smith 
469d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCMPIDestroy(PC pc)
470d71ae5a4SJacob Faibussowitsch {
471f1f2ae84SBarry Smith   PC_MPI  *km = pc ? (PC_MPI *)pc->data : NULL;
472f1f2ae84SBarry Smith   KSP      ksp;
473f1f2ae84SBarry Smith   MPI_Comm comm = PC_MPI_COMM_WORLD;
474f1f2ae84SBarry Smith 
475f1f2ae84SBarry Smith   PetscFunctionBegin;
476f1f2ae84SBarry Smith   PetscCallMPI(MPI_Scatter(pc ? km->ksps : NULL, 1, MPI_AINT, &ksp, 1, MPI_AINT, 0, comm));
4773ba16761SJacob Faibussowitsch   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
478c7d372c4SBarry Smith   PetscCall(PetscLogStagePush(PCMPIStage));
4799f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
480f1f2ae84SBarry Smith   PetscCall(KSPDestroy(&ksp));
481c7d372c4SBarry Smith   PetscCall(PetscLogStagePop());
4829f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
4833ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
484f1f2ae84SBarry Smith }
485f1f2ae84SBarry Smith 
4869f0612e4SBarry Smith static PetscErrorCode PCMPIServerBroadcastRequest(PCMPICommand request)
4879f0612e4SBarry Smith {
4889f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
4899f0612e4SBarry Smith   PetscMPIInt dummy1 = 1, dummy2;
4909f0612e4SBarry Smith #endif
4919f0612e4SBarry Smith 
4929f0612e4SBarry Smith   PetscFunctionBegin;
4939f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
4949f0612e4SBarry Smith   if (PCMPIServerUseShmget) {
4959f0612e4SBarry Smith     for (PetscMPIInt i = 1; i < PetscGlobalSize; i++) pthread_mutex_unlock(&PCMPIServerLocks[i]);
4969f0612e4SBarry Smith   }
4979f0612e4SBarry Smith #endif
4989f0612e4SBarry Smith   PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, MPI_COMM_WORLD));
4999f0612e4SBarry Smith   /* next line ensures the receiving ranks have already taken their locks before rank 0 re-locks them */
5009f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
5019f0612e4SBarry Smith   if (PCMPIServerUseShmget) {
5029f0612e4SBarry Smith     PetscCallMPI(MPI_Reduce(&dummy1, &dummy2, 1, MPI_INT, MPI_SUM, 0, PC_MPI_COMM_WORLD));
5039f0612e4SBarry Smith     for (PetscMPIInt i = 1; i < PetscGlobalSize; i++) pthread_mutex_lock(&PCMPIServerLocks[i]);
5049f0612e4SBarry Smith   }
5059f0612e4SBarry Smith #endif
5069f0612e4SBarry Smith   PetscFunctionReturn(PETSC_SUCCESS);
5079f0612e4SBarry Smith }
5083821be0aSBarry Smith 
509f1f2ae84SBarry Smith /*@C
5107a99bfcaSBarry Smith   PCMPIServerBegin - starts a server that runs on the `rank != 0` MPI processes waiting to process requests for
511f1580f4eSBarry Smith   parallel `KSP` solves and management of parallel `KSP` objects.
512f1f2ae84SBarry Smith 
5133821be0aSBarry Smith   Logically Collective on all MPI processes except rank 0
514f1f2ae84SBarry Smith 
515f1580f4eSBarry Smith   Options Database Keys:
516f1f2ae84SBarry Smith + -mpi_linear_solver_server                   - causes the PETSc program to start in MPI linear solver server mode where only the first MPI rank runs user code
5179f0612e4SBarry Smith . -mpi_linear_solver_server_view              - displays information about all the linear systems solved by the MPI linear solver server at the conclusion of the program
5189f0612e4SBarry Smith - -mpi_linear_solver_server_use_shared_memory - use shared memory when communicating matrices and vectors to server processes (default where supported)
519f1f2ae84SBarry Smith 
52020f4b53cSBarry Smith   Level: developer
52120f4b53cSBarry Smith 
522f1580f4eSBarry Smith   Note:
523f1f2ae84SBarry Smith   This is normally started automatically in `PetscInitialize()` when the option is provided
524f1f2ae84SBarry Smith 
5253821be0aSBarry Smith   See `PCMPI` for information on using the solver with a `KSP` object
5263821be0aSBarry Smith 
527f1f2ae84SBarry Smith   Developer Notes:
5283821be0aSBarry Smith   When called on MPI rank 0 this sets `PETSC_COMM_WORLD` to `PETSC_COMM_SELF` to allow a main program
529f1580f4eSBarry Smith   written with `PETSC_COMM_WORLD` to run correctly on that single rank while all the other ranks
530f1580f4eSBarry Smith   (that would normally be sharing `PETSC_COMM_WORLD`) run the solver server.
531f1f2ae84SBarry Smith 
532f1580f4eSBarry Smith   Can this be integrated into the `PetscDevice` abstraction that is currently being developed?
533f1f2ae84SBarry Smith 
5343821be0aSBarry Smith   Conceivably `PCREDISTRIBUTE` could be organized in a similar manner to simplify its usage
5353821be0aSBarry Smith 
5363821be0aSBarry Smith   This could be implemented directly at the `KSP` level instead of using the `PCMPI` wrapper object
5373821be0aSBarry Smith 
538baca6076SPierre Jolivet   The code could be extended to allow an MPI + OpenMP application to use the linear solver server concept across all shared-memory
5393821be0aSBarry Smith   nodes with a single MPI process per node for the user application but multiple MPI processes per node for the linear solver.
5403821be0aSBarry Smith 
5417a99bfcaSBarry Smith   The concept could also be extended for users' callbacks for `SNES`, `TS`, and `Tao` where, for example, `SNESSolve()` runs on
5423821be0aSBarry Smith   all MPI processes but the user callback only runs on one MPI process per node.
5433821be0aSBarry Smith 
5443821be0aSBarry Smith   PETSc could also be extended with an MPI-less API that provides access to PETSc's solvers without any reference to MPI, essentially removing
5453821be0aSBarry Smith   the `MPI_Comm` argument from PETSc calls.
5463821be0aSBarry Smith 
5473821be0aSBarry Smith .seealso: [](sec_pcmpi), `PCMPIServerEnd()`, `PCMPI`, `KSPCheckPCMPI()`
548f1f2ae84SBarry Smith @*/
549d71ae5a4SJacob Faibussowitsch PetscErrorCode PCMPIServerBegin(void)
550d71ae5a4SJacob Faibussowitsch {
551f1f2ae84SBarry Smith   PetscMPIInt rank;
552f1f2ae84SBarry Smith 
553f1f2ae84SBarry Smith   PetscFunctionBegin;
5549d3446b2SPierre Jolivet   PetscCall(PetscInfo(NULL, "Starting MPI Linear Solver Server\n"));
5555e1a0e3cSBarry Smith   if (PetscDefined(USE_SINGLE_LIBRARY)) {
5565e1a0e3cSBarry Smith     PetscCall(VecInitializePackage());
5575e1a0e3cSBarry Smith     PetscCall(MatInitializePackage());
5585e1a0e3cSBarry Smith     PetscCall(DMInitializePackage());
5595e1a0e3cSBarry Smith     PetscCall(PCInitializePackage());
5605e1a0e3cSBarry Smith     PetscCall(KSPInitializePackage());
5615e1a0e3cSBarry Smith     PetscCall(SNESInitializePackage());
5625e1a0e3cSBarry Smith     PetscCall(TSInitializePackage());
5635e1a0e3cSBarry Smith     PetscCall(TaoInitializePackage());
5645e1a0e3cSBarry Smith   }
565956255efSBarry Smith   PetscCall(PetscLogStageRegister("PCMPI", &PCMPIStage));
56645682376SBarry Smith   PetscCall(PetscLogEventRegister("ServerDist", PC_CLASSID, &EventServerDist));
5679f0612e4SBarry Smith   PetscCall(PetscLogEventRegister("ServerDistMPI", PC_CLASSID, &EventServerDistMPI));
5689f0612e4SBarry Smith 
5699f0612e4SBarry Smith   if (!PetscDefined(HAVE_SHMGET)) PCMPIServerUseShmget = PETSC_FALSE;
5709f0612e4SBarry Smith   PetscCall(PetscOptionsGetBool(NULL, NULL, "-mpi_linear_solver_server_use_shared_memory", &PCMPIServerUseShmget, NULL));
5715e1a0e3cSBarry Smith 
572f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(PC_MPI_COMM_WORLD, &rank));
5739f0612e4SBarry Smith   if (PCMPIServerUseShmget) {
5749f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
5759f0612e4SBarry Smith     PetscMPIInt size;
5769f0612e4SBarry Smith 
5779f0612e4SBarry Smith     PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
5789f0612e4SBarry Smith     if (size > 1) {
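      /* one process-shared mutex per server rank, created and initially held by rank 0; presumably this lets idle server ranks sleep in pthread_mutex_lock() instead of spinning inside MPI_Bcast() while waiting for the next request */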
5799f0612e4SBarry Smith       pthread_mutex_t *locks;
5809f0612e4SBarry Smith 
5819f0612e4SBarry Smith       if (rank == 0) {
5829f0612e4SBarry Smith         PCMPIServerActive = PETSC_TRUE;
5839f0612e4SBarry Smith         PetscCall(PetscShmgetAllocateArray(size, sizeof(pthread_mutex_t), (void **)&locks));
5849f0612e4SBarry Smith       }
5859f0612e4SBarry Smith       PetscCall(PetscShmgetMapAddresses(PETSC_COMM_WORLD, 1, (const void **)&locks, (void **)&PCMPIServerLocks));
5869f0612e4SBarry Smith       if (rank == 0) {
5879f0612e4SBarry Smith         pthread_mutexattr_t attr;
5889f0612e4SBarry Smith 
5899f0612e4SBarry Smith         pthread_mutexattr_init(&attr);
5909f0612e4SBarry Smith         pthread_mutexattr_setpshared(&attr, PTHREAD_PROCESS_SHARED);
5919f0612e4SBarry Smith 
5929f0612e4SBarry Smith         for (int i = 1; i < size; i++) {
5939f0612e4SBarry Smith           pthread_mutex_init(&PCMPIServerLocks[i], &attr);
5949f0612e4SBarry Smith           pthread_mutex_lock(&PCMPIServerLocks[i]);
5959f0612e4SBarry Smith         }
5969f0612e4SBarry Smith       }
5979f0612e4SBarry Smith       PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
5989f0612e4SBarry Smith     }
5999f0612e4SBarry Smith #endif
6009f0612e4SBarry Smith   }
601f1f2ae84SBarry Smith   if (rank == 0) {
602f1f2ae84SBarry Smith     PETSC_COMM_WORLD  = PETSC_COMM_SELF;
6033821be0aSBarry Smith     PCMPIServerActive = PETSC_TRUE;
6043ba16761SJacob Faibussowitsch     PetscFunctionReturn(PETSC_SUCCESS);
605f1f2ae84SBarry Smith   }
606f1f2ae84SBarry Smith 
607f1f2ae84SBarry Smith   while (PETSC_TRUE) {
608f1f2ae84SBarry Smith     PCMPICommand request = PCMPI_CREATE;
60966a7e86cSPierre Jolivet #if defined(PETSC_HAVE_PTHREAD_MUTEX)
6109f0612e4SBarry Smith     PetscMPIInt dummy1 = 1, dummy2;
61166a7e86cSPierre Jolivet #endif
6129f0612e4SBarry Smith 
613d7c1f440SPierre Jolivet     // TODO: can we broadcast the number of active ranks here so only the correct subset of processes waits on the later scatters?
6149f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
6159f0612e4SBarry Smith     if (PCMPIServerUseShmget) pthread_mutex_lock(&PCMPIServerLocks[PetscGlobalRank]);
6169f0612e4SBarry Smith #endif
617f1f2ae84SBarry Smith     PetscCallMPI(MPI_Bcast(&request, 1, MPIU_ENUM, 0, PC_MPI_COMM_WORLD));
6189f0612e4SBarry Smith #if defined(PETSC_HAVE_PTHREAD_MUTEX)
6199f0612e4SBarry Smith     if (PCMPIServerUseShmget) {
6209f0612e4SBarry Smith       /* next line ensures PetscGlobalRank has locked before rank 0 can take the lock back */
6219f0612e4SBarry Smith       PetscCallMPI(MPI_Reduce(&dummy1, &dummy2, 1, MPI_INT, MPI_SUM, 0, PC_MPI_COMM_WORLD));
6229f0612e4SBarry Smith       pthread_mutex_unlock(&PCMPIServerLocks[PetscGlobalRank]);
6239f0612e4SBarry Smith     }
6249f0612e4SBarry Smith #endif
625f1f2ae84SBarry Smith     switch (request) {
626d71ae5a4SJacob Faibussowitsch     case PCMPI_CREATE:
627d71ae5a4SJacob Faibussowitsch       PetscCall(PCMPICreate(NULL));
628d71ae5a4SJacob Faibussowitsch       break;
629d71ae5a4SJacob Faibussowitsch     case PCMPI_SET_MAT:
630d71ae5a4SJacob Faibussowitsch       PetscCall(PCMPISetMat(NULL));
631d71ae5a4SJacob Faibussowitsch       break;
632d71ae5a4SJacob Faibussowitsch     case PCMPI_UPDATE_MAT_VALUES:
633d71ae5a4SJacob Faibussowitsch       PetscCall(PCMPIUpdateMatValues(NULL));
634d71ae5a4SJacob Faibussowitsch       break;
635f1f2ae84SBarry Smith     case PCMPI_VIEW:
636f1f2ae84SBarry Smith       // PetscCall(PCMPIView(NULL));
637f1f2ae84SBarry Smith       break;
638d71ae5a4SJacob Faibussowitsch     case PCMPI_SOLVE:
639d71ae5a4SJacob Faibussowitsch       PetscCall(PCMPISolve(NULL, NULL, NULL));
640d71ae5a4SJacob Faibussowitsch       break;
641d71ae5a4SJacob Faibussowitsch     case PCMPI_DESTROY:
642d71ae5a4SJacob Faibussowitsch       PetscCall(PCMPIDestroy(NULL));
643d71ae5a4SJacob Faibussowitsch       break;
644f1f2ae84SBarry Smith     case PCMPI_EXIT:
6459f0612e4SBarry Smith       if (PCMPIServerUseShmget) PetscCall(PetscShmgetUnmapAddresses(1, (void **)&PCMPIServerLocks));
646f1f2ae84SBarry Smith       PetscCall(PetscFinalize());
647f1f2ae84SBarry Smith       exit(0); /* not sure if this is a good idea, but cannot return because it will run the user's main program */
648f1f2ae84SBarry Smith       break;
649d71ae5a4SJacob Faibussowitsch     default:
650d71ae5a4SJacob Faibussowitsch       break;
651f1f2ae84SBarry Smith     }
652f1f2ae84SBarry Smith   }
6533ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
654f1f2ae84SBarry Smith }
655f1f2ae84SBarry Smith 
656f1f2ae84SBarry Smith /*@C
657f1f2ae84SBarry Smith   PCMPIServerEnd - ends a server that runs on the rank != 0 MPI processes waiting to process requests for
658f1580f4eSBarry Smith   parallel KSP solves and management of parallel `KSP` objects.
659f1f2ae84SBarry Smith 
66020f4b53cSBarry Smith   Logically Collective on all MPI ranks except 0
66120f4b53cSBarry Smith 
66220f4b53cSBarry Smith   Level: developer
663f1f2ae84SBarry Smith 
664f1580f4eSBarry Smith   Note:
6659f0612e4SBarry Smith   This is normally called automatically in `PetscFinalize()`
666f1f2ae84SBarry Smith 
6673821be0aSBarry Smith .seealso: [](sec_pcmpi), `PCMPIServerBegin()`, `PCMPI`, `KSPCheckPCMPI()`
668f1f2ae84SBarry Smith @*/
669d71ae5a4SJacob Faibussowitsch PetscErrorCode PCMPIServerEnd(void)
670d71ae5a4SJacob Faibussowitsch {
671f1f2ae84SBarry Smith   PetscFunctionBegin;
672f1f2ae84SBarry Smith   if (PetscGlobalRank == 0) {
673f1f2ae84SBarry Smith     PetscViewer       viewer = NULL;
674f1f2ae84SBarry Smith     PetscViewerFormat format;
675f1f2ae84SBarry Smith 
6769f0612e4SBarry Smith     PetscCall(PetscShmgetAddressesFinalize());
6779f0612e4SBarry Smith     PetscCall(PCMPIServerBroadcastRequest(PCMPI_EXIT));
6789f0612e4SBarry Smith     if (PCMPIServerUseShmget) PetscCall(PetscShmgetUnmapAddresses(1, (void **)&PCMPIServerLocks));
679f1f2ae84SBarry Smith     PETSC_COMM_WORLD = MPI_COMM_WORLD; /* could use PC_MPI_COMM_WORLD */
680f1f2ae84SBarry Smith     PetscOptionsBegin(PETSC_COMM_SELF, NULL, "MPI linear solver server options", NULL);
681f1f2ae84SBarry Smith     PetscCall(PetscOptionsViewer("-mpi_linear_solver_server_view", "View information about system solved with the server", "PCMPI", &viewer, &format, NULL));
682f1f2ae84SBarry Smith     PetscOptionsEnd();
683f1f2ae84SBarry Smith     if (viewer) {
684f1f2ae84SBarry Smith       PetscBool isascii;
685f1f2ae84SBarry Smith 
686f1f2ae84SBarry Smith       PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
687f1f2ae84SBarry Smith       if (isascii) {
688f1f2ae84SBarry Smith         PetscMPIInt size;
6895316cbedSBarry Smith         PetscMPIInt i;
690f1f2ae84SBarry Smith 
691f1f2ae84SBarry Smith         PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
6925316cbedSBarry Smith         PetscCall(PetscViewerASCIIPrintf(viewer, "MPI linear solver server statistics:\n"));
6935316cbedSBarry Smith         PetscCall(PetscViewerASCIIPrintf(viewer, "    Ranks        KSPSolve()s     Mats        KSPs       Avg. Size      Avg. Its\n"));
6945316cbedSBarry Smith         if (PCMPIKSPCountsSeq) {
6955316cbedSBarry Smith           PetscCall(PetscViewerASCIIPrintf(viewer, "  Sequential         %" PetscInt_FMT "                         %" PetscInt_FMT "            %" PetscInt_FMT "           %" PetscInt_FMT "\n", PCMPISolveCountsSeq, PCMPIKSPCountsSeq, PCMPISizesSeq / PCMPISolveCountsSeq, PCMPIIterationsSeq / PCMPISolveCountsSeq));
696f1f2ae84SBarry Smith         }
6975316cbedSBarry Smith         for (i = 0; i < size; i++) {
6985316cbedSBarry Smith           if (PCMPIKSPCounts[i]) {
6995316cbedSBarry Smith             PetscCall(PetscViewerASCIIPrintf(viewer, "     %d               %" PetscInt_FMT "            %" PetscInt_FMT "           %" PetscInt_FMT "            %" PetscInt_FMT "            %" PetscInt_FMT "\n", i + 1, PCMPISolveCounts[i], PCMPIMatCounts[i], PCMPIKSPCounts[i], PCMPISizes[i] / PCMPISolveCounts[i], PCMPIIterations[i] / PCMPISolveCounts[i]));
7005316cbedSBarry Smith           }
7015316cbedSBarry Smith         }
7029f0612e4SBarry Smith         PetscCall(PetscViewerASCIIPrintf(viewer, "MPI linear solver server %susing shared memory\n", PCMPIServerUseShmget ? "" : "not "));
703f1f2ae84SBarry Smith       }
704648c30bcSBarry Smith       PetscCall(PetscViewerDestroy(&viewer));
705f1f2ae84SBarry Smith     }
706f1f2ae84SBarry Smith   }
707f1f2ae84SBarry Smith   PetscCall(PCMPICommsDestroy());
7083821be0aSBarry Smith   PCMPIServerActive = PETSC_FALSE;
7093ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
710f1f2ae84SBarry Smith }
711f1f2ae84SBarry Smith 
712f1f2ae84SBarry Smith /*
713f1f2ae84SBarry Smith     This version is used in the trivial case when the MPI parallel solver server is running on just the original MPI rank 0
714f1f2ae84SBarry Smith     because, for example, the problem is small. It is more efficient because it does not require copying any data.
715f1f2ae84SBarry Smith */
716d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCSetUp_Seq(PC pc)
717d71ae5a4SJacob Faibussowitsch {
718f1f2ae84SBarry Smith   PC_MPI     *km = (PC_MPI *)pc->data;
719f1f2ae84SBarry Smith   Mat         sA;
720f1f2ae84SBarry Smith   const char *prefix;
721f9818f3cSJose E. Roman   char       *found = NULL, *cprefix;
722f1f2ae84SBarry Smith 
723f1f2ae84SBarry Smith   PetscFunctionBegin;
7249f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
725f1f2ae84SBarry Smith   PetscCall(PCGetOperators(pc, NULL, &sA));
726f1f2ae84SBarry Smith   PetscCall(PCGetOptionsPrefix(pc, &prefix));
727f1f2ae84SBarry Smith   PetscCall(KSPCreate(PETSC_COMM_SELF, &km->ksps[0]));
7283821be0aSBarry Smith   PetscCall(KSPSetNestLevel(km->ksps[0], 1));
7293821be0aSBarry Smith   PetscCall(PetscObjectSetTabLevel((PetscObject)km->ksps[0], 1));
7303821be0aSBarry Smith 
7313821be0aSBarry Smith   /* Created KSP gets prefix of PC minus the mpi_linear_solver_server_ portion */
7323821be0aSBarry Smith   PetscCall(PCGetOptionsPrefix(pc, &prefix));
7333821be0aSBarry Smith   PetscCheck(prefix, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCMPI missing required prefix");
7343821be0aSBarry Smith   PetscCall(PetscStrallocpy(prefix, &cprefix));
7353821be0aSBarry Smith   PetscCall(PetscStrstr(cprefix, "mpi_linear_solver_server_", &found));
7363821be0aSBarry Smith   PetscCheck(found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCMPI missing mpi_linear_solver_server_ portion of prefix");
7373821be0aSBarry Smith   *found = 0;
7383821be0aSBarry Smith   PetscCall(KSPSetOptionsPrefix(km->ksps[0], cprefix));
7393821be0aSBarry Smith   PetscCall(PetscFree(cprefix));
7403821be0aSBarry Smith 
741f1f2ae84SBarry Smith   PetscCall(KSPSetOperators(km->ksps[0], sA, sA));
742f1f2ae84SBarry Smith   PetscCall(KSPSetFromOptions(km->ksps[0]));
743f1f2ae84SBarry Smith   PetscCall(KSPSetUp(km->ksps[0]));
7443ba16761SJacob Faibussowitsch   PetscCall(PetscInfo((PetscObject)pc, "MPI parallel linear solver system is being solved directly on rank 0 due to its small size\n"));
745f1f2ae84SBarry Smith   PCMPIKSPCountsSeq++;
7469f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
7473ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
748f1f2ae84SBarry Smith }
749f1f2ae84SBarry Smith 
750d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCApply_Seq(PC pc, Vec b, Vec x)
751d71ae5a4SJacob Faibussowitsch {
752f1f2ae84SBarry Smith   PC_MPI  *km = (PC_MPI *)pc->data;
7535316cbedSBarry Smith   PetscInt its, n;
7545316cbedSBarry Smith   Mat      A;
755f1f2ae84SBarry Smith 
756f1f2ae84SBarry Smith   PetscFunctionBegin;
7579f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
758f1f2ae84SBarry Smith   PetscCall(KSPSolve(km->ksps[0], b, x));
7595316cbedSBarry Smith   PetscCall(KSPGetIterationNumber(km->ksps[0], &its));
760f1f2ae84SBarry Smith   PCMPISolveCountsSeq++;
7615316cbedSBarry Smith   PCMPIIterationsSeq += its;
7625316cbedSBarry Smith   PetscCall(KSPGetOperators(km->ksps[0], NULL, &A));
7635316cbedSBarry Smith   PetscCall(MatGetSize(A, &n, NULL));
7645316cbedSBarry Smith   PCMPISizesSeq += n;
7659f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
7669f0612e4SBarry Smith   /*
7679f0612e4SBarry Smith     do not keep a reference to the previous rhs and solution since destroying them in the next KSPSolve()
7689f0612e4SBarry Smith     may use PetscFree() instead of PCMPIArrayDeallocate()
7699f0612e4SBarry Smith   */
7709f0612e4SBarry Smith   PetscCall(VecDestroy(&km->ksps[0]->vec_rhs));
7719f0612e4SBarry Smith   PetscCall(VecDestroy(&km->ksps[0]->vec_sol));
7723ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
773f1f2ae84SBarry Smith }
774f1f2ae84SBarry Smith 
775d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCView_Seq(PC pc, PetscViewer viewer)
776d71ae5a4SJacob Faibussowitsch {
777f1f2ae84SBarry Smith   PC_MPI *km = (PC_MPI *)pc->data;
778f1f2ae84SBarry Smith 
779f1f2ae84SBarry Smith   PetscFunctionBegin;
780f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Running MPI linear solver server directly on rank 0 due to its small size\n"));
781f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Desired minimum number of matrix rows per rank for MPI parallel solve %d\n", (int)km->mincntperrank));
7825316cbedSBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "*** Use -mpi_linear_solver_server_view to view statistics on all the solves ***\n"));
7833ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
784f1f2ae84SBarry Smith }
785f1f2ae84SBarry Smith 
786d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCDestroy_Seq(PC pc)
787d71ae5a4SJacob Faibussowitsch {
788f1f2ae84SBarry Smith   PC_MPI *km = (PC_MPI *)pc->data;
7899f0612e4SBarry Smith   Mat     A, B;
7909f0612e4SBarry Smith   Vec     x, b;
791f1f2ae84SBarry Smith 
792f1f2ae84SBarry Smith   PetscFunctionBegin;
7939f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_TRUE;
7949f0612e4SBarry Smith   /* since matrices and vectors are shared with outer KSP we need to ensure they are not destroyed with PetscFree() */
7959f0612e4SBarry Smith   PetscCall(KSPGetOperators(km->ksps[0], &A, &B));
7969f0612e4SBarry Smith   PetscCall(PetscObjectReference((PetscObject)A));
7979f0612e4SBarry Smith   PetscCall(PetscObjectReference((PetscObject)B));
7989f0612e4SBarry Smith   PetscCall(KSPGetSolution(km->ksps[0], &x));
7999f0612e4SBarry Smith   PetscCall(PetscObjectReference((PetscObject)x));
8009f0612e4SBarry Smith   PetscCall(KSPGetRhs(km->ksps[0], &b));
8019f0612e4SBarry Smith   PetscCall(PetscObjectReference((PetscObject)b));
802f1f2ae84SBarry Smith   PetscCall(KSPDestroy(&km->ksps[0]));
803f1f2ae84SBarry Smith   PetscCall(PetscFree(pc->data));
8049f0612e4SBarry Smith   PCMPIServerInSolve = PETSC_FALSE;
8059f0612e4SBarry Smith   PetscCall(MatDestroy(&A));
8069f0612e4SBarry Smith   PetscCall(MatDestroy(&B));
8079f0612e4SBarry Smith   PetscCall(VecDestroy(&x));
8089f0612e4SBarry Smith   PetscCall(VecDestroy(&b));
8093ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
810f1f2ae84SBarry Smith }
811f1f2ae84SBarry Smith 
812f1f2ae84SBarry Smith /*
813f1f2ae84SBarry Smith      PCSetUp_MPI - Trigger the creation of the MPI parallel PC and copy parts of the matrix and
814dd8e379bSPierre Jolivet      right-hand side to the parallel PC
815f1f2ae84SBarry Smith */
816d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCSetUp_MPI(PC pc)
817d71ae5a4SJacob Faibussowitsch {
818f1f2ae84SBarry Smith   PC_MPI     *km = (PC_MPI *)pc->data;
819f1f2ae84SBarry Smith   PetscMPIInt rank, size;
820f1f2ae84SBarry Smith   PetscBool   newmatrix = PETSC_FALSE;
821f1f2ae84SBarry Smith 
822f1f2ae84SBarry Smith   PetscFunctionBegin;
823f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));
824f1f2ae84SBarry Smith   PetscCheck(rank == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PCMPI can only be used from 0th rank of MPI_COMM_WORLD. Perhaps a missing -mpi_linear_solver_server?");
825f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(MPI_COMM_WORLD, &size));
826f1f2ae84SBarry Smith 
827f1f2ae84SBarry Smith   if (!pc->setupcalled) {
828f1f2ae84SBarry Smith     if (!km->alwaysuseserver) {
829f1f2ae84SBarry Smith       PetscInt n;
830f1f2ae84SBarry Smith       Mat      sA;
831f1f2ae84SBarry Smith       /* short circuit for small systems */
832f1f2ae84SBarry Smith       PetscCall(PCGetOperators(pc, &sA, &sA));
833f1f2ae84SBarry Smith       PetscCall(MatGetSize(sA, &n, NULL));
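      /* with fewer than roughly 2*mincntperrank rows the server would use a single rank anyway, so solve directly on rank 0 */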
834f1f2ae84SBarry Smith       if (n < 2 * km->mincntperrank - 1 || size == 1) {
835f1f2ae84SBarry Smith         pc->ops->setup   = NULL;
836f1f2ae84SBarry Smith         pc->ops->apply   = PCApply_Seq;
837f1f2ae84SBarry Smith         pc->ops->destroy = PCDestroy_Seq;
838f1f2ae84SBarry Smith         pc->ops->view    = PCView_Seq;
839f1f2ae84SBarry Smith         PetscCall(PCSetUp_Seq(pc));
8403ba16761SJacob Faibussowitsch         PetscFunctionReturn(PETSC_SUCCESS);
841f1f2ae84SBarry Smith       }
842f1f2ae84SBarry Smith     }
843f1f2ae84SBarry Smith 
8449f0612e4SBarry Smith     PetscCall(PCMPIServerBroadcastRequest(PCMPI_CREATE));
845f1f2ae84SBarry Smith     PetscCall(PCMPICreate(pc));
846f1f2ae84SBarry Smith     newmatrix = PETSC_TRUE;
8479371c9d4SSatish Balay   }
8489371c9d4SSatish Balay   if (pc->flag == DIFFERENT_NONZERO_PATTERN) newmatrix = PETSC_TRUE;
849f1f2ae84SBarry Smith 
850f1f2ae84SBarry Smith   if (newmatrix) {
8513ba16761SJacob Faibussowitsch     PetscCall(PetscInfo((PetscObject)pc, "New matrix or matrix has changed nonzero structure\n"));
8529f0612e4SBarry Smith     PetscCall(PCMPIServerBroadcastRequest(PCMPI_SET_MAT));
853f1f2ae84SBarry Smith     PetscCall(PCMPISetMat(pc));
854f1f2ae84SBarry Smith   } else {
855bbea24aaSStefano Zampini     PetscCall(PetscInfo((PetscObject)pc, "Matrix has only changed nonzero values\n"));
8569f0612e4SBarry Smith     PetscCall(PCMPIServerBroadcastRequest(PCMPI_UPDATE_MAT_VALUES));
857f1f2ae84SBarry Smith     PetscCall(PCMPIUpdateMatValues(pc));
858f1f2ae84SBarry Smith   }
8593ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
860f1f2ae84SBarry Smith }
861f1f2ae84SBarry Smith 
862d71ae5a4SJacob Faibussowitsch static PetscErrorCode PCApply_MPI(PC pc, Vec b, Vec x)
863d71ae5a4SJacob Faibussowitsch {
864f1f2ae84SBarry Smith   PetscFunctionBegin;
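  /* wake the server ranks so they enter the parallel solve, then participate in that solve from this (the 0th) rank */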
8659f0612e4SBarry Smith   PetscCall(PCMPIServerBroadcastRequest(PCMPI_SOLVE));
866f1f2ae84SBarry Smith   PetscCall(PCMPISolve(pc, b, x));
8673ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
868f1f2ae84SBarry Smith }
869f1f2ae84SBarry Smith 
87066976f2fSJacob Faibussowitsch static PetscErrorCode PCDestroy_MPI(PC pc)
871d71ae5a4SJacob Faibussowitsch {
872f1f2ae84SBarry Smith   PetscFunctionBegin;
8739f0612e4SBarry Smith   PetscCall(PCMPIServerBroadcastRequest(PCMPI_DESTROY));
874f1f2ae84SBarry Smith   PetscCall(PCMPIDestroy(pc));
875f1f2ae84SBarry Smith   PetscCall(PetscFree(pc->data));
8763ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
877f1f2ae84SBarry Smith }
878f1f2ae84SBarry Smith 
879f1f2ae84SBarry Smith /*
8809f0612e4SBarry Smith      PCView_MPI - Cannot call view directly on the MPI parallel KSP because the other MPI ranks do not have access to the viewer; its viewing must be controlled via the options database
881f1f2ae84SBarry Smith */
88266976f2fSJacob Faibussowitsch static PetscErrorCode PCView_MPI(PC pc, PetscViewer viewer)
883d71ae5a4SJacob Faibussowitsch {
884f1f2ae84SBarry Smith   PC_MPI     *km = (PC_MPI *)pc->data;
885f1f2ae84SBarry Smith   MPI_Comm    comm;
886f1f2ae84SBarry Smith   PetscMPIInt size;
887f1f2ae84SBarry Smith 
888f1f2ae84SBarry Smith   PetscFunctionBegin;
889f1f2ae84SBarry Smith   PetscCall(PetscObjectGetComm((PetscObject)km->ksps[0], &comm));
890f1f2ae84SBarry Smith   PetscCallMPI(MPI_Comm_size(comm, &size));
891f1f2ae84SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Size of MPI communicator used for MPI parallel KSP solve %d\n", size));
8929f0612e4SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "Desired minimum number of matrix rows on each MPI process for MPI parallel solve %d\n", (int)km->mincntperrank));
8939f0612e4SBarry Smith   PetscCall(PetscViewerASCIIPrintf(viewer, "*** Use -mpi_linear_solver_server_view to view statistics on all the solves ***\n"));
8943ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
895f1f2ae84SBarry Smith }
896f1f2ae84SBarry Smith 
89766976f2fSJacob Faibussowitsch static PetscErrorCode PCSetFromOptions_MPI(PC pc, PetscOptionItems *PetscOptionsObject)
898d71ae5a4SJacob Faibussowitsch {
899f1f2ae84SBarry Smith   PC_MPI *km = (PC_MPI *)pc->data;
900f1f2ae84SBarry Smith 
901f1f2ae84SBarry Smith   PetscFunctionBegin;
902f1f2ae84SBarry Smith   PetscOptionsHeadBegin(PetscOptionsObject, "MPI linear solver server options");
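  /* since the PC's options prefix always contains mpi_linear_solver_server_ (checked in PCCreate_MPI()), these options appear in the
     database as -[any_ksp_prefix]_mpi_linear_solver_server_minimum_count_per_rank and -[any_ksp_prefix]_mpi_linear_solver_server_always_use_server */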
9033821be0aSBarry Smith   PetscCall(PetscOptionsInt("-minimum_count_per_rank", "Desired minimum number of matrix rows on each rank for the MPI parallel solve", "None", km->mincntperrank, &km->mincntperrank, NULL));
9043821be0aSBarry Smith   PetscCall(PetscOptionsBool("-always_use_server", "Use the server even if only one rank is used for the solve (for debugging)", "None", km->alwaysuseserver, &km->alwaysuseserver, NULL));
905f1f2ae84SBarry Smith   PetscOptionsHeadEnd();
9063ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
907f1f2ae84SBarry Smith }
908f1f2ae84SBarry Smith 
909f1f2ae84SBarry Smith /*MC
910f1580f4eSBarry Smith      PCMPI - Calls an MPI parallel `KSP` to solve a linear system from user code running on one process
911f1f2ae84SBarry Smith 
9123821be0aSBarry Smith    Options Database Keys for the Server:
913f1f2ae84SBarry Smith +  -mpi_linear_solver_server - causes the PETSc program to start in MPI linear solver server mode where only the first MPI rank runs user code
9149f0612e4SBarry Smith .  -mpi_linear_solver_server_view - displays information about all the linear systems solved by the MPI linear solver server
9159f0612e4SBarry Smith -  -mpi_linear_solver_server_use_shared_memory <true, false> - use shared memory to distribute the matrix and right-hand side, defaults to true
916f1f2ae84SBarry Smith 
9173821be0aSBarry Smith    Options Database Keys for a specific `KSP` object:
9183821be0aSBarry Smith +  -[any_ksp_prefix]_mpi_linear_solver_server_minimum_count_per_rank - sets the minimum size of the linear system per MPI rank that the solver will strive for
9193821be0aSBarry Smith -  -[any_ksp_prefix]_mpi_linear_solver_server_always_use_server - use the server solver code even if the particular system is only solved on a single process (for debugging and testing purposes)
9203821be0aSBarry Smith 
9213821be0aSBarry Smith    Level: developer
92220f4b53cSBarry Smith 
923f1f2ae84SBarry Smith    Notes:
9249f0612e4SBarry Smith    This cannot be used with vectors or matrices that are created using arrays provided by the user, such as `VecCreateWithArray()` or
9259f0612e4SBarry Smith    `MatCreateSeqAIJWithArrays()`
9269f0612e4SBarry Smith 
92746bbbc36SPierre Jolivet    The options database prefix for the actual solver is whatever prefix was provided, before first use, to the original `KSP` with `KSPSetOptionsPrefix()`; most commonly no prefix is used.
928f1f2ae84SBarry Smith 
929f1f2ae84SBarry Smith    It can be particularly useful for user code that is parallelized with OpenMP, or potentially for user GPU code.
930f1f2ae84SBarry Smith 
931dd8e379bSPierre Jolivet    When the program is running with a single MPI process then it directly uses the provided matrix and right-hand side
9323821be0aSBarry Smith    and does not need to distribute the matrix and vector to the various MPI processes; thus it incurs no extra overhead over just using the `KSP` directly.
933f1f2ae84SBarry Smith 
9343821be0aSBarry Smith    The options for the actual solver `KSP` and `PC` must be controlled via the options database; calls that set options directly on the user-level `KSP` and `PC` have no effect
9350316ec64SBarry Smith    because those are not the actual solver objects.
9360316ec64SBarry Smith 
9370316ec64SBarry Smith    When `-log_view` is used with this solver the events within the parallel solve are logged in their own stage. Some of the logging in the other
93868a21331SBarry Smith    stages will be confusing since the event times are only recorded on the 0th MPI rank; thus the percentage of time spent in those events will be misleading.
9390316ec64SBarry Smith 
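   Example Usage:
   An otherwise sequential PETSc code can be run unchanged under the server; the executable name, process count, and solver options below are only illustrative
.vb
   mpiexec -n 4 ./mysequentialcode -mpi_linear_solver_server -mpi_linear_solver_server_view -ksp_type cg -pc_type bjacobi
.ve
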
9403821be0aSBarry Smith    Developer Note:
9413821be0aSBarry Smith    This `PCType` is never directly selected by the user; it is set when the option `-mpi_linear_solver_server` is used and the `PC` is at the outermost nesting of
9423821be0aSBarry Smith    a `KSP`. The outermost `KSP` object is automatically set to `KSPPREONLY` and thus is not directly visible to the user.
9433821be0aSBarry Smith 
9443821be0aSBarry Smith .seealso: [](sec_pcmpi), `KSPCreate()`, `KSPSetType()`, `KSPType`, `KSP`, `PC`, `PCMPIServerBegin()`, `PCMPIServerEnd()`, `KSPCheckPCMPI()`
945f1f2ae84SBarry Smith M*/
946d71ae5a4SJacob Faibussowitsch PETSC_EXTERN PetscErrorCode PCCreate_MPI(PC pc)
947d71ae5a4SJacob Faibussowitsch {
948f1f2ae84SBarry Smith   PC_MPI *km;
949f9818f3cSJose E. Roman   char   *found = NULL;
950f1f2ae84SBarry Smith 
951f1f2ae84SBarry Smith   PetscFunctionBegin;
9523821be0aSBarry Smith   PetscCall(PetscStrstr(((PetscObject)pc)->prefix, "mpi_linear_solver_server_", &found));
9533821be0aSBarry Smith   PetscCheck(found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCMPI object prefix does not have mpi_linear_solver_server_");
9543821be0aSBarry Smith 
9553821be0aSBarry Smith   /* material from PCSetType(): clear the operations and data installed by the previous PC type so this PC can be converted to PCMPI in place */
9563821be0aSBarry Smith   PetscTryTypeMethod(pc, destroy);
9573821be0aSBarry Smith   pc->ops->destroy = NULL;
9583821be0aSBarry Smith   pc->data         = NULL;
9593821be0aSBarry Smith 
9603821be0aSBarry Smith   PetscCall(PetscFunctionListDestroy(&((PetscObject)pc)->qlist));
9613821be0aSBarry Smith   PetscCall(PetscMemzero(pc->ops, sizeof(struct _PCOps)));
9623821be0aSBarry Smith   pc->modifysubmatrices  = NULL;
9633821be0aSBarry Smith   pc->modifysubmatricesP = NULL;
9643821be0aSBarry Smith   pc->setupcalled        = 0;
9653821be0aSBarry Smith 
9664dfa11a4SJacob Faibussowitsch   PetscCall(PetscNew(&km));
967f1f2ae84SBarry Smith   pc->data = (void *)km;
968f1f2ae84SBarry Smith 
969f1f2ae84SBarry Smith   km->mincntperrank = 10000;
970f1f2ae84SBarry Smith 
971f1f2ae84SBarry Smith   pc->ops->setup          = PCSetUp_MPI;
972f1f2ae84SBarry Smith   pc->ops->apply          = PCApply_MPI;
973f1f2ae84SBarry Smith   pc->ops->destroy        = PCDestroy_MPI;
974f1f2ae84SBarry Smith   pc->ops->view           = PCView_MPI;
975f1f2ae84SBarry Smith   pc->ops->setfromoptions = PCSetFromOptions_MPI;
9763821be0aSBarry Smith   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCMPI));
9773ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
978f1f2ae84SBarry Smith }
9799f0612e4SBarry Smith 
9809f0612e4SBarry Smith /*@
9819f0612e4SBarry Smith   PCMPIGetKSP - Gets the `KSP` created by the `PCMPI`
9829f0612e4SBarry Smith 
9839f0612e4SBarry Smith   Not Collective
9849f0612e4SBarry Smith 
9859f0612e4SBarry Smith   Input Parameter:
9869f0612e4SBarry Smith . pc - the preconditioner context
9879f0612e4SBarry Smith 
9889f0612e4SBarry Smith   Output Parameter:
9899f0612e4SBarry Smith . innerksp - the inner `KSP`
9909f0612e4SBarry Smith 
9919f0612e4SBarry Smith   Level: advanced
9929f0612e4SBarry Smith 
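  Example Usage:
  A minimal sketch (error checking omitted; the variable names are illustrative); the inner `KSP` can be queried after a solve
.vb
  KSP      inner;
  PetscInt its;

  PCMPIGetKSP(pc, &inner);
  KSPGetIterationNumber(inner, &its);
.ve
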
9939f0612e4SBarry Smith .seealso: [](ch_ksp), `KSP`, `PCMPI`, `PCREDISTRIBUTE`
9949f0612e4SBarry Smith @*/
9959f0612e4SBarry Smith PetscErrorCode PCMPIGetKSP(PC pc, KSP *innerksp)
9969f0612e4SBarry Smith {
9979f0612e4SBarry Smith   PC_MPI *red = (PC_MPI *)pc->data;
9989f0612e4SBarry Smith 
9999f0612e4SBarry Smith   PetscFunctionBegin;
10009f0612e4SBarry Smith   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
10019f0612e4SBarry Smith   PetscAssertPointer(innerksp, 2);
10029f0612e4SBarry Smith   *innerksp = red->ksps[0];
10039f0612e4SBarry Smith   PetscFunctionReturn(PETSC_SUCCESS);
10049f0612e4SBarry Smith }
1005