xref: /petsc/src/benchmarks/streams/makefile (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
# Optional per-user PETSc location overrides; '-' suppresses the error
# if petscdir.mk does not exist in this tree.
-include ../../../petscdir.mk

LOCDIR        = src/benchmarks/streams/
EXAMPLESC     = BasicVersion.c MPIVersion.c OpenMPVersion.c SSEVersion.c PthreadVersion.c CUDAVersion.cu
EXAMPLESF     =
TESTS         = BasicVersion OpenMPVersion
MANSEC        = Sys

# Standard PETSc build machinery: compiler/linker variables, implicit
# rules (.c -> .o etc.), and the test harness targets.
include ${PETSC_DIR}/lib/petsc/conf/variables
include ${PETSC_DIR}/lib/petsc/conf/rules
include ${PETSC_DIR}/lib/petsc/conf/test
12d8c74875SBarry Smith
# Serial STREAM benchmark executable; object file removed after linking.
BasicVersion: BasicVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
16d8c74875SBarry Smith
# MPI-parallel STREAM benchmark executable; object file removed after linking.
MPIVersion: MPIVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
20d3ae85c4SBarry Smith
# CUDA STREAM benchmark executable; object file removed after linking.
CUDAVersion: CUDAVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
2419816777SMark
# OpenMP STREAM benchmark executable; object file removed after linking.
# NOTE(review): unlike the sibling rules this links without ${PETSC_LIB} —
# presumably intentional (standalone OpenMP code), but worth confirming.
OpenMPVersion: OpenMPVersion.o
	-@${CLINKER} -o $@ $<
	@${RM} -f $<
28d8c74875SBarry Smith
# SSE STREAM benchmark executable; object file removed after linking.
# Consistency fix: add the '@' echo-suppression prefix used by every other
# link rule in this file ('-' still ignores link errors, as before).
SSEVersion: SSEVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
32d3ae85c4SBarry Smith
# Pthreads STREAM benchmark executable; object file removed after linking.
PthreadVersion: PthreadVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
36b8a1809bSJed Brown
# make streams [NPMAX=integer_number_of_MPI_processes_to_use] [MPI_BINDING='binding options']
# Runs MPIVersion on 1..NPMAX ranks, appending each run to scaling.log,
# then post-processes the log with process.py.
mpistream:  MPIVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes> [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
	-@i=0; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n $${i} ./MPIVersion | tee -a scaling.log; \
        done
	-@echo "------------------------------------------------"
	-@${PYTHON} process.py MPI fileoutput
474198fb66SBarry Smith
# Works on SUMMIT
# CUDA streams scaling via jsrun-style flags (-c cores, -a tasks, -g GPUs).
# First loop: one resource set, 1..min(NPMAX,7) cores/tasks.  Second loop:
# scale resource sets in steps of 7 ranks, sweeping 6..7 cores per set.
# NOTE(review): the usage message below still says 'make streams' — looks
# copy-pasted from the mpistream target; confirm intended wording.
cudastreamjsrun:  CUDAVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes> [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
	-@i=0; while [ $${i} -lt ${NPMAX} ] && [ $${i} -lt 7 ]; do i=`expr $${i} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n 1 -c$${i} -a$${i} -g1 ./CUDAVersion | tee -a scaling.log; \
        done
	-@n=1; i=7; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 7`; n=`expr $${n} + 1`; \
	       c=5; while [ $${c} -lt 7 ]; do c=`expr $${c} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n $${n} -c$${c} -a$${c} -g1 ./CUDAVersion | tee -a scaling.log; \
        done; done
	-@echo "------------------------------------------------"
	-@${PYTHON} process.py CUDA fileoutput
6219816777SMark
# make openmpstream NPMAX=<integer number of threads>
# Runs OpenMPVersion with OMP_NUM_THREADS=1..NPMAX, appending each run to
# scaling.log, then post-processes the log with process.py.
# Fix: the announcement line was prefixed '@-@printf' — make stripped the
# leading '@' and '-' and passed a literal '-@printf' to the shell, so the
# command failed; the correct prefix (as in every sibling target) is '-@'.
openmpstream:  OpenMPVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with make openmpstream NPMAX=<integer number of threads>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running openmpstreams using 'NPMAX=${NPMAX}'\n"
	-@i=0; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 1`; \
	  OMP_NUM_THREADS=$${i} ./OpenMPVersion  | tee -a scaling.log;\
        done
	-@${PYTHON} process.py OpenMP fileoutput
71a6cca095SBarry Smith
# Prints the machine topology (if the lstopo tool was found at configure
# time) so scaling results can be related to cores/NUMA layout.
hwloc:
	-@if [ "${LSTOPO}foo" != "foo" ]; then ${MPIEXEC} ${MPI_BINDING} -n 1 ${LSTOPO} --no-icaches --no-io --ignore PU ; fi
74c40481a3SBarry Smith
# MPI scaling run plus topology report, then summary output (no 'fileoutput').
mpistreams: mpistream hwloc
	-@${PYTHON} process.py MPI
774198fb66SBarry Smith
784198fb66SBarry Smith
# OpenMP scaling run plus topology report, then summary output (no 'fileoutput').
openmpstreams: openmpstream hwloc
	-@${PYTHON} process.py OpenMP
81d3ae85c4SBarry Smith
82d3ae85c4SBarry Smith
83