
#include <petscsys.h> /*I  "petscsys.h"  I*/
#include <petsc/private/petscimpl.h>
/*
    Note that tag of 0 is ok because comm is a private communicator
  generated below just for these routines.
*/

PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Wait for the go-ahead message from the previous process before entering the phase */
  if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
  /* Send to the next process in the group unless we are the last process */
  if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Send to the first process in the next group */
  if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
  /* Process 0 completes the cycle by receiving the message sent by the last process */
  if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;

/*@
  PetscSequentialPhaseBegin - Begins a sequential section of code.

  Collective

  Input Parameters:
+ comm - Communicator to sequentialize over
- ng   - Number of processes per group.  This many processes are allowed to execute
   at the same time (usually 1)

  Level: intermediate

  Notes:
  `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
  way to force a section of code to be executed by the processes in
  rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

  Use `PetscSynchronizedPrintf()`, not these routines, to ensure that output from different MPI processes is properly ordered.
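
  For example, rank-ordered output can be produced as follows (a minimal sketch, assuming an MPI communicator `comm`):
.vb
      PetscMPIInt rank;

      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      PetscCall(PetscSynchronizedPrintf(comm, "Hello from rank %d\n", rank));
      PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
.ve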

.seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
@*/
PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng)
{
  PetscMPIInt size;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCall(PetscSysInitializePackage());
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Get the private communicator for the sequential operations */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));

  PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
  PetscCall(PetscMalloc1(1, &addr_local_comm));

  *addr_local_comm = local_comm;

  /* Stash the duplicated communicator on comm so PetscSequentialPhaseEnd() can retrieve it */
  PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
  PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PetscSequentialPhaseEnd - Ends a sequential section of code.

  Collective

  Input Parameters:
+ comm - Communicator to sequentialize over
- ng   - Number of processes per group.  This many processes are allowed to execute
   at the same time (usually 1)

  Level: intermediate

  Note:
  See `PetscSequentialPhaseBegin()` for more details.

.seealso: `PetscSequentialPhaseBegin()`
@*/
PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng)
{
  PetscMPIInt size, flag;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));

  PetscCall(PetscFree(addr_local_comm));
  PetscCallMPI(MPI_Comm_free(&local_comm));
  PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PetscGlobalMinMaxInt - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce with
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

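  Note:
  A typical use is (a minimal sketch; `lmin` and `lmax` are hypothetical variables holding this process's local extrema):
.vb
      PetscInt local[2], global[2];

      local[0] = lmin; /* hypothetical local minimum computed earlier */
      local[1] = lmax; /* hypothetical local maximum computed earlier */
      PetscCall(PetscGlobalMinMaxInt(PETSC_COMM_WORLD, local, global));
      /* global[0] now holds the minimum over all processes, global[1] the maximum */
.ve
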
.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
@*/
PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
{
  PetscInt sendbuf[3], recvbuf[3];

  PetscFunctionBegin;
  /* Negate the minimum so that a single MPI_MAX reduction yields both extrema */
  sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
  sendbuf[1] = minMaxVal[1];
  sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Flag whether minMaxVal[0] is PETSC_MIN_INT, whose negation overflows */
  PetscCall(MPIU_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
  minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
  minMaxValGlobal[1] = recvbuf[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce with
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

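  Note:
  Usage mirrors `PetscGlobalMinMaxInt()` (a minimal sketch; `lmin` and `lmax` are hypothetical variables holding this process's local extrema):
.vb
      PetscReal local[2], global[2];

      local[0] = lmin; /* hypothetical local minimum computed earlier */
      local[1] = lmax; /* hypothetical local maximum computed earlier */
      PetscCall(PetscGlobalMinMaxReal(PETSC_COMM_WORLD, local, global));
.ve
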
.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
{
  PetscReal sendbuf[2];

  PetscFunctionBegin;
  /* Negate the minimum so that a single max reduction yields both extrema */
  sendbuf[0] = -minMaxVal[0];
  sendbuf[1] = minMaxVal[1];
  PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
  minMaxValGlobal[0] = -minMaxValGlobal[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}