#include <petscsys.h> /*I  "petscsys.h"  I*/
#include <petsc/private/petscimpl.h>
/*
    Note that a tag of 0 is ok because comm is a private communicator
  generated below just for these routines.
*/

PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng) {
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(0);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
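  /* Block until the previous process releases the token (a zero-length message) */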
  if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
  /* Send to the next process unless we are the last in our group or the last process overall */
  if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng) {
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(0);

  /* Send to the first process in the next group */
  if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
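  /* Rank 0 drains the final message sent around the ring, leaving the communicator with no pending traffic */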
  if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;

/*@
   PetscSequentialPhaseBegin - Begins a sequential section of code.

   Collective

   Input Parameters:
+  comm - Communicator to sequentialize.
-  ng   - Number of processes per group; this many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Notes:
   `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
   way to force a section of code to be executed by the processes in
   rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

   Use `PetscSynchronizedPrintf()`, not these routines, to ensure that output between MPI processes is properly ordered.

.seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
@*/
PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng) {
  PetscMPIInt size;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCall(PetscSysInitializePackage());
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(0);

  /* Get the private communicator for the sequential operations */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));

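  /* Duplicate the communicator so the tag-0 messages used here cannot be confused with user communication on comm */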
  PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
  PetscCall(PetscMalloc1(1, &addr_local_comm));

  *addr_local_comm = local_comm;

  PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
  PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
  PetscFunctionReturn(0);
}

/*@
   PetscSequentialPhaseEnd - Ends a sequential section of code.

   Collective

   Input Parameters:
+  comm - Communicator to sequentialize.
-  ng   - Number of processes per group; this many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Note:
   See `PetscSequentialPhaseBegin()` for more details.

.seealso: `PetscSequentialPhaseBegin()`
@*/
PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng) {
  PetscMPIInt size, flag;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(0);

  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));

  PetscCall(PetscFree(addr_local_comm));
  PetscCallMPI(MPI_Comm_free(&local_comm));
  PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
  PetscFunctionReturn(0);
}
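
/*
  Illustrative sketch (not part of PETSc) contrasting the two approaches from the
  notes above; the variable `rank` is assumed to hold this process's rank:

     PetscCall(PetscSequentialPhaseBegin(PETSC_COMM_WORLD, 1));
     PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] doing my part\n", rank));
     PetscCall(PetscSequentialPhaseEnd(PETSC_COMM_WORLD, 1));

  For ordered output alone, PetscSynchronizedPrintf() is preferred:

     PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] doing my part\n", rank));
     PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
*/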

/*@C
  PetscGlobalMinMaxInt - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
@*/
PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2]) {
  PetscInt sendbuf[3], recvbuf[3];

  PetscFunctionBegin;
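  /* The min over ranks is computed as -max(-value), so a single MPI_MAX reduction yields both extrema */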
  sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
  sendbuf[1] = minMaxVal[1];
  sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Flag whether minMaxVal[0] is PETSC_MIN_INT, since its negation overflows */
  PetscCallMPI(MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
  minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
  minMaxValGlobal[1] = recvbuf[1];
  PetscFunctionReturn(0);
}
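
/*
  Illustrative usage sketch (hypothetical variable `nLocal`, not part of PETSc):
  find the smallest and largest locally owned size over all processes.

     PetscInt localMinMax[2], globalMinMax[2];
     localMinMax[0] = nLocal;
     localMinMax[1] = nLocal;
     PetscCall(PetscGlobalMinMaxInt(PETSC_COMM_WORLD, localMinMax, globalMinMax));
     globalMinMax[0] now holds the min over ranks, globalMinMax[1] the max
*/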

/*@C
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2]) {
  PetscReal sendbuf[2];

  PetscFunctionBegin;
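  /* Same negate-and-max trick as PetscGlobalMinMaxInt(); floating-point negation is exact, so no overflow flag is needed */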
  sendbuf[0] = -minMaxVal[0];
  sendbuf[1] = minMaxVal[1];
  PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
  minMaxValGlobal[0] = -minMaxValGlobal[0];
  PetscFunctionReturn(0);
}
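
/*
  Illustrative usage sketch (hypothetical variable `localNorm`, not part of PETSc):

     PetscReal localMinMax[2], globalMinMax[2];
     localMinMax[0] = localNorm;
     localMinMax[1] = localNorm;
     PetscCall(PetscGlobalMinMaxReal(PETSC_COMM_WORLD, localMinMax, globalMinMax));
*/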