xref: /petsc/src/benchmarks/streams/makefile (revision 9f0612e409f6220a780be6348417bea34ef34962)
1cdb0f33dSPierre Jolivet-include ../../../petscdir.mk
2f97672e5SBarry Smith
# Manual-page section for this directory's sources
MANSEC        = Sys
# Uncomment to allow very large global arrays in the benchmark sources
#CFLAGS        = -mcmodel=large
6d8c74875SBarry Smith
7af0996ceSBarry Smithinclude ${PETSC_DIR}/lib/petsc/conf/variables
8af0996ceSBarry Smithinclude ${PETSC_DIR}/lib/petsc/conf/rules
9d8c74875SBarry Smith
# Link the sequential (single-process) STREAMS benchmark and drop the object file
BasicVersion: BasicVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
13d8c74875SBarry Smith
# Link the MPI STREAMS benchmark and drop the object file
MPIVersion: MPIVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
17d3ae85c4SBarry Smith
# Link the CUDA STREAMS benchmark and drop the object file
CUDAVersion: CUDAVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
2119816777SMark
# Link the OpenMP STREAMS benchmark; note it is linked without ${PETSC_LIB}
OpenMPVersion: OpenMPVersion.o
	-@${CLINKER} -o $@ $<
	@${RM} -f $<
25d8c74875SBarry Smith
# Link the OpenMP-organized-like-MPI STREAMS benchmark; linked without ${PETSC_LIB}
OpenMPVersionLikeMPI: OpenMPVersionLikeMPI.o
	-@${CLINKER} -o $@ $<
	@${RM} -f $<
29*9f0612e4SBarry Smith
# Link the SSE STREAMS benchmark; commands are echoed (no @) by design
SSEVersion: SSEVersion.o
	-${CLINKER} -o SSEVersion SSEVersion.o ${PETSC_LIB}
	${RM} -f SSEVersion.o
33d3ae85c4SBarry Smith
# Link the pthreads STREAMS benchmark and drop the object file
PthreadVersion: PthreadVersion.o
	-@${CLINKER} -o $@ $< ${PETSC_LIB}
	@${RM} -f $<
37b8a1809bSJed Brown
# make streams [NPMAX=integer_number_of_MPI_processes_to_use] [MPI_BINDING='binding options']
# Runs MPIVersion on 1..NPMAX ranks, appending each run's output to scaling.log,
# then summarizes with process.py. Fails fast when NPMAX is not given.
mpistream:  MPIVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes> [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make streams NPMAX=<integer number of MPI processes>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
	-@np=0; while [ $${np} -lt ${NPMAX} ]; do \
	    np=`expr $${np} + 1`; \
	    ${MPIEXEC} ${MPI_BINDING} -n $${np} ./MPIVersion | tee -a scaling.log; \
	  done
	-@echo "------------------------------------------------"
	-@${PYTHON} process.py MPI fileoutput
484198fb66SBarry Smith
# Works on SUMMIT (jsrun launcher). Runs CUDAVersion with increasing resource
# sets: single resource set with 1..6 cores/GPUs, then multiple resource sets
# of 6 or 7 cores each, appending results to scaling.log for process.py.
cudastreamjsrun:  CUDAVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make cudastreamjsrun NPMAX=<integer number of MPI processes> [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make cudastreamjsrun NPMAX=<integer number of MPI processes>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
	-@i=0; while [ $${i} -lt ${NPMAX} ] && [ $${i} -lt 7 ]; do i=`expr $${i} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n 1 -c$${i} -a$${i} -g1 ./CUDAVersion | tee -a scaling.log; \
	done
	-@n=1; i=7; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 7`; n=`expr $${n} + 1`; \
	       c=5; while [ $${c} -lt 7 ]; do c=`expr $${c} + 1`; \
	  ${MPIEXEC} ${MPI_BINDING} -n $${n} -c$${c} -a$${c} -g1 ./CUDAVersion | tee -a scaling.log; \
	done; done
	-@echo "------------------------------------------------"
	-@${PYTHON} process.py CUDA fileoutput
6319816777SMark
# Runs OpenMPVersion with OMP_NUM_THREADS = 1..NPMAX, appending each run's
# output to scaling.log, then summarizes with process.py.
# Fails fast when NPMAX is not given.
openmpstream:  OpenMPVersion
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with make openmpstream NPMAX=<integer number of threads>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running openmpstreams using 'NPMAX=${NPMAX}'\n"
	-@i=0; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 1`; \
	  OMP_NUM_THREADS=$${i} ./OpenMPVersion  | tee -a scaling.log;\
	done
	-@${PYTHON} process.py OpenMP fileoutput
72a6cca095SBarry Smith
# Runs OpenMPVersionLikeMPI with OMP_NUM_THREADS = 1..NPMAX, appending each
# run's output to scaling.log, then summarizes with process.py.
# Fails fast when NPMAX is not given.
openmplikempistream:  OpenMPVersionLikeMPI
	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with make openmplikempistream NPMAX=<integer number of threads>\n"; exit 1 ; fi
	-@printf "" > scaling.log
	-@printf "Running openmplikempistreams using 'NPMAX=${NPMAX}'\n"
	-@i=0; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 1`; \
	  OMP_NUM_THREADS=$${i} ./OpenMPVersionLikeMPI  | tee -a scaling.log;\
	done
	-@${PYTHON} process.py OpenMPLikeMPI fileoutput
81*9f0612e4SBarry Smith
# Print the machine topology via hwloc's lstopo, when ${LSTOPO} is configured
hwloc:
	-@if [ -n "${LSTOPO}" ]; then ${MPIEXEC} ${MPI_BINDING} -n 1 ${LSTOPO} --no-icaches --no-io --ignore PU ; fi
84c40481a3SBarry Smith
# MPI benchmark plus topology report, then reprocess the scaling data
mpistreams: mpistream hwloc
	-@${PYTHON} process.py MPI
874198fb66SBarry Smith
884198fb66SBarry Smith
# OpenMP benchmark plus topology report, then reprocess the scaling data
openmpstreams: openmpstream hwloc
	-@${PYTHON} process.py OpenMP
91d3ae85c4SBarry Smith
92d3ae85c4SBarry Smith
93