xref: /petsc/src/sys/utils/mpiu.c (revision 3ba1676111f5c958fe6c2729b46ca4d523958bb3)
1e5c89e4eSSatish Balay 
2c6db04a5SJed Brown #include <petscsys.h> /*I  "petscsys.h"  I*/
38ae1f613SSatish Balay #include <petsc/private/petscimpl.h>
4e5c89e4eSSatish Balay /*
5e5c89e4eSSatish Balay     Note that tag of 0 is ok because comm is a private communicator
6e5c89e4eSSatish Balay   generated below just for these routines.
7e5c89e4eSSatish Balay */
8e5c89e4eSSatish Balay 
9d71ae5a4SJacob Faibussowitsch PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng)
10d71ae5a4SJacob Faibussowitsch {
11e5c89e4eSSatish Balay   PetscMPIInt rank, size, tag = 0;
12e5c89e4eSSatish Balay   MPI_Status  status;
13e5c89e4eSSatish Balay 
14e5c89e4eSSatish Balay   PetscFunctionBegin;
159566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(comm, &size));
16*3ba16761SJacob Faibussowitsch   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
179566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1848a46eb9SPierre Jolivet   if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
19e5c89e4eSSatish Balay   /* Send to the next process in the group unless we are the last process */
2048a46eb9SPierre Jolivet   if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
21*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
22e5c89e4eSSatish Balay }
23e5c89e4eSSatish Balay 
24d71ae5a4SJacob Faibussowitsch PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng)
25d71ae5a4SJacob Faibussowitsch {
26e5c89e4eSSatish Balay   PetscMPIInt rank, size, tag = 0;
27e5c89e4eSSatish Balay   MPI_Status  status;
28e5c89e4eSSatish Balay 
29e5c89e4eSSatish Balay   PetscFunctionBegin;
309566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(comm, &rank));
319566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(comm, &size));
32*3ba16761SJacob Faibussowitsch   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
33e5c89e4eSSatish Balay 
34e5c89e4eSSatish Balay   /* Send to the first process in the next group */
3548a46eb9SPierre Jolivet   if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
3648a46eb9SPierre Jolivet   if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
37*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
38e5c89e4eSSatish Balay }
39e5c89e4eSSatish Balay 
/* ---------------------------------------------------------------------*/
/*
    Petsc_Seq_keyval is the MPI attribute keyval under which the private
  duplicated communicator managing the sequential-phase routines below is
  attached to the user's communicator. It is created lazily on first use
  by PetscSequentialPhaseBegin() and remains MPI_KEYVAL_INVALID until then.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
46e5c89e4eSSatish Balay 
47e30d2299SSatish Balay /*@
48e5c89e4eSSatish Balay    PetscSequentialPhaseBegin - Begins a sequential section of code.
49e5c89e4eSSatish Balay 
50d083f849SBarry Smith    Collective
51e5c89e4eSSatish Balay 
52e5c89e4eSSatish Balay    Input Parameters:
53e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
54e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
55e5c89e4eSSatish Balay    at the same time (usually 1)
56e5c89e4eSSatish Balay 
57e5c89e4eSSatish Balay    Level: intermediate
58e5c89e4eSSatish Balay 
59e5c89e4eSSatish Balay    Notes:
60811af0c4SBarry Smith    `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
61e5c89e4eSSatish Balay    way to force a section of code to be executed by the processes in
62e5c89e4eSSatish Balay    rank order.  Typically, this is done with
63e5c89e4eSSatish Balay .vb
64e5c89e4eSSatish Balay       PetscSequentialPhaseBegin(comm, 1);
65e5c89e4eSSatish Balay       <code to be executed sequentially>
66e5c89e4eSSatish Balay       PetscSequentialPhaseEnd(comm, 1);
67e5c89e4eSSatish Balay .ve
68e5c89e4eSSatish Balay 
69811af0c4SBarry Smith    You should use `PetscSynchronizedPrintf()` to ensure output between MPI ranks is properly order and not these routines.
70e5c89e4eSSatish Balay 
71811af0c4SBarry Smith .seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
72e5c89e4eSSatish Balay @*/
73d71ae5a4SJacob Faibussowitsch PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng)
74d71ae5a4SJacob Faibussowitsch {
75e5c89e4eSSatish Balay   PetscMPIInt size;
76e5c89e4eSSatish Balay   MPI_Comm    local_comm, *addr_local_comm;
77e5c89e4eSSatish Balay 
78e5c89e4eSSatish Balay   PetscFunctionBegin;
799566063dSJacob Faibussowitsch   PetscCall(PetscSysInitializePackage());
809566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(comm, &size));
81*3ba16761SJacob Faibussowitsch   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
82e5c89e4eSSatish Balay 
83e5c89e4eSSatish Balay   /* Get the private communicator for the sequential operations */
8448a46eb9SPierre Jolivet   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));
85e5c89e4eSSatish Balay 
869566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
879566063dSJacob Faibussowitsch   PetscCall(PetscMalloc1(1, &addr_local_comm));
88a297a907SKarl Rupp 
89e5c89e4eSSatish Balay   *addr_local_comm = local_comm;
90a297a907SKarl Rupp 
919566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
929566063dSJacob Faibussowitsch   PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
93*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
94e5c89e4eSSatish Balay }
95e5c89e4eSSatish Balay 
96e30d2299SSatish Balay /*@
97e5c89e4eSSatish Balay    PetscSequentialPhaseEnd - Ends a sequential section of code.
98e5c89e4eSSatish Balay 
99d083f849SBarry Smith    Collective
100e5c89e4eSSatish Balay 
101e5c89e4eSSatish Balay    Input Parameters:
102e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
103e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
104e5c89e4eSSatish Balay    at the same time (usually 1)
105e5c89e4eSSatish Balay 
106e5c89e4eSSatish Balay    Level: intermediate
107e5c89e4eSSatish Balay 
108811af0c4SBarry Smith    Note:
109811af0c4SBarry Smith    See `PetscSequentialPhaseBegin()` for more details.
110e5c89e4eSSatish Balay 
111db781477SPatrick Sanan .seealso: `PetscSequentialPhaseBegin()`
112e5c89e4eSSatish Balay @*/
113d71ae5a4SJacob Faibussowitsch PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng)
114d71ae5a4SJacob Faibussowitsch {
115e5c89e4eSSatish Balay   PetscMPIInt size, flag;
116e5c89e4eSSatish Balay   MPI_Comm    local_comm, *addr_local_comm;
117e5c89e4eSSatish Balay 
118e5c89e4eSSatish Balay   PetscFunctionBegin;
1199566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(comm, &size));
120*3ba16761SJacob Faibussowitsch   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
121e5c89e4eSSatish Balay 
1229566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
12328b400f6SJacob Faibussowitsch   PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
124e5c89e4eSSatish Balay   local_comm = *addr_local_comm;
125e5c89e4eSSatish Balay 
1269566063dSJacob Faibussowitsch   PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));
127e5c89e4eSSatish Balay 
1289566063dSJacob Faibussowitsch   PetscCall(PetscFree(addr_local_comm));
1299566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_free(&local_comm));
1309566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
131*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
132e5c89e4eSSatish Balay }
1336ba4152bSMatthew G. Knepley 
1346ba4152bSMatthew G. Knepley /*@C
1356ba4152bSMatthew G. Knepley   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
1366ba4152bSMatthew G. Knepley 
137d083f849SBarry Smith   Collective
1386ba4152bSMatthew G. Knepley 
1396ba4152bSMatthew G. Knepley   Input Parameter:
1406ba4152bSMatthew G. Knepley . minMaxVal - An array with the local min and max
1416ba4152bSMatthew G. Knepley 
1426ba4152bSMatthew G. Knepley   Output Parameter:
1436ba4152bSMatthew G. Knepley . minMaxValGlobal - An array with the global min and max
1446ba4152bSMatthew G. Knepley 
1456ba4152bSMatthew G. Knepley   Level: beginner
1466ba4152bSMatthew G. Knepley 
147811af0c4SBarry Smith .seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
1486ba4152bSMatthew G. Knepley @*/
149d71ae5a4SJacob Faibussowitsch PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
150d71ae5a4SJacob Faibussowitsch {
15158b5cd2aSSatish Balay   PetscInt sendbuf[3], recvbuf[3];
1526ba4152bSMatthew G. Knepley 
1536ba4152bSMatthew G. Knepley   PetscFunctionBegin;
15458b5cd2aSSatish Balay   sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
15558b5cd2aSSatish Balay   sendbuf[1] = minMaxVal[1];
15658b5cd2aSSatish Balay   sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Are there PETSC_MIN_INT in minMaxVal[0]? */
1579566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
15858b5cd2aSSatish Balay   minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
15958b5cd2aSSatish Balay   minMaxValGlobal[1] = recvbuf[1];
160*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
1616ba4152bSMatthew G. Knepley }
1626ba4152bSMatthew G. Knepley 
/*@C
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce over
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
{
  PetscReal sendbuf[2];

  PetscFunctionBegin;
  /* Negate the minimum so a single MPIU_MAX reduction produces both extrema */
  sendbuf[0] = -minMaxVal[0];
  sendbuf[1] = minMaxVal[1];
  PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
  /* Undo the negation to recover the true global minimum */
  minMaxValGlobal[0] = -minMaxValGlobal[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}
189