xref: /petsc/src/benchmarks/streams/makefile (revision 198167770d420a45271c7f7f67c0aac7f5fdda28)

# Default goal; the actual build logic comes from the PETSc rules included below.
ALL:

# Per-directory compiler/preprocessor flag hooks (empty here; PETSc's conf
# files supply the real flags).
CFLAGS        =
FFLAGS        =
CPPFLAGS      =
FPPFLAGS      =
# Location of this directory relative to PETSC_DIR, used by PETSc's rules.
LOCDIR        = src/benchmarks/streams/
# C/CUDA sources for the streams benchmark variants built in this directory.
EXAMPLESC     = BasicVersion.c MPIVersion.c OpenMPVersion.c SSEVersion.c PthreadVersion.c CUDAVersion.cu
EXAMPLESF     =
# Benchmarks exercised by the PETSc test harness.
TESTS         = BasicVersion OpenMPVersion
MANSEC        = Sys

# Pull in PETSc's compiler/linker variables, generic build rules, and the
# test-harness targets (requires PETSC_DIR to be set in the environment).
include ${PETSC_DIR}/lib/petsc/conf/variables
include ${PETSC_DIR}/lib/petsc/conf/rules
include ${PETSC_DIR}/lib/petsc/conf/test

# Link the single-process streams benchmark, then remove the intermediate
# object file ('-' ignores link failures, '@' silences the command echo).
BasicVersion: BasicVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<

# Link the MPI streams benchmark, then remove the intermediate object file.
MPIVersion: MPIVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<

# Link the CUDA streams benchmark (object built from CUDAVersion.cu by the
# included PETSc rules), then remove the intermediate object file.
CUDAVersion: CUDAVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<

# Link the OpenMP streams benchmark.
# NOTE(review): unlike every sibling link rule this one omits ${PETSC_LIB} --
# presumably OpenMPVersion.c is self-contained; confirm before "fixing".
OpenMPVersion: OpenMPVersion.o
	-@${CLINKER} -o OpenMPVersion OpenMPVersion.o
	@${RM} -f OpenMPVersion.o

# Link the SSE streams benchmark. Unlike the other link rules the commands
# are not silenced with '@', so the exact link line is echoed when run.
SSEVersion: SSEVersion.o
	-${CLINKER} -o SSEVersion SSEVersion.o ${PETSC_LIB}
	${RM} -f SSEVersion.o

# Link the pthreads streams benchmark, then remove the intermediate object.
PthreadVersion: PthreadVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<

# make streams [NPMAX=integer_number_of_MPI_processes_to_use] [MPI_BINDING='binding options']
# Runs MPIVersion on 1..NPMAX ranks, appending every run's output to
# scaling.log, then post-processes the log with process.py. Fails fast with
# a usage message when NPMAX is unset.
mpistream:  MPIVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes> [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
	-@i=0; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n $${i} ./MPIVersion | tee -a scaling.log; \
        done
	-@echo "------------------------------------------------"
	-@${PYTHON} process.py MPI fileoutput

# Works on SUMMIT (jsrun-style launcher flags: -c cores, -a tasks, -g GPUs
# per resource set). First sweep: one resource set with 1..min(NPMAX,7)
# cores/tasks and a single GPU. Second sweep: grow the rank count in steps
# of 7, trying 6 cores/tasks per resource set. All output goes to
# scaling.log, which process.py then analyzes.
# Fix: the usage message previously said 'make streams' (copy-paste from
# mpistream); it now names this target.
cudastreamjsrun:  CUDAVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make cudastreamjsrun NPMAX=<integer number of MPI processes> [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make cudastreamjsrun NPMAX=<integer number of MPI processes>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
	-@i=0; while [ $${i} -lt ${NPMAX} ] && [ $${i} -lt 7 ]; do i=`expr $${i} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n 1 -c$${i} -a$${i} -g1 ./CUDAVersion | tee -a scaling.log; \
        done
	-@n=1; i=7; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 7`; n=`expr $${n} + 1`; \
	       c=5; while [ $${c} -lt 7 ]; do c=`expr $${c} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n $${n} -c$${c} -a$${c} -g1 ./CUDAVersion | tee -a scaling.log; \
        done; done
	-@echo "------------------------------------------------"
	-@${PYTHON} process.py CUDA fileoutput

# make openmpstream NPMAX=<n>
# Runs OpenMPVersion with OMP_NUM_THREADS=1..NPMAX, appending every run's
# output to scaling.log, then post-processes the log with process.py.
# Fix: the status line carried a garbled '@-@' recipe prefix; normalized to
# the '-@' used on every other status line.
openmpstream:  OpenMPVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with make openmpstream NPMAX=<integer number of threads>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running openmpstreams using 'NPMAX=${NPMAX}'\n"
	-@i=0; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 1`; \
	  OMP_NUM_THREADS=$${i} ./OpenMPVersion  | tee -a scaling.log;\
        done
	-@${PYTHON} process.py OpenMP fileoutput

# Print the machine topology to help interpret the scaling results.
# Silently skipped when LSTOPO is unset; runs lstopo under ${MPIEXEC} so the
# binding options apply. NOTE(review): '--ignore PU' is the older lstopo
# spelling -- confirm against the installed hwloc version.
hwloc:
	-@if [ "${LSTOPO}foo" != "foo" ]; then ${MPIEXEC} ${MPI_BINDING} -n 1 ${LSTOPO} --no-icaches --no-io --ignore PU ; fi

# Convenience target: run the MPI streams sweep and the topology report,
# then print the analysis (process.py without 'fileoutput').
mpistreams: mpistream hwloc
	-@${PYTHON} process.py MPI


# Convenience target: run the OpenMP streams sweep and the topology report,
# then print the analysis (process.py without 'fileoutput').
openmpstreams: openmpstream hwloc
	-@${PYTHON} process.py OpenMP

