Skip to content

Commit 8545842

Browse files
ldoktor and portante
authored
Port various features from pbench-run-benchmark to pbench-linpack
linpack could be executed via the "pbench-run-benchmark" where it had "linpack-postprocess-cdm" postprocess script tailored for it. The same script is reused for pbench-linpack. To inherit the file structure we copy the result file to the expected directory and then simply run the commands. When executing linpack via pbench-run-benchmark one had the opportunity to specify various attributes, while the original pbench-linpack relied on the prepared linpack.dat file. We utilize the already existing code to allow such customization in pbench-linpack as well. This is a go-for-broke refactoring which adjusts how `pbench-linpack` is written, drops the use of `check_install_rpm` in favor of a default install directory that can be overridden, adds iterations to the `metadata.log` file, and adds a set of unit tests covering its basic behaviors. Signed-off-by: Lukáš Doktor <[email protected]> Co-authored-by: Peter Portante <[email protected]>
1 parent ed0ddfb commit 8545842

File tree

18 files changed

+512
-104
lines changed

18 files changed

+512
-104
lines changed

agent/bench-scripts/pbench-linpack

Lines changed: 157 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -1,29 +1,19 @@
11
#!/bin/bash
2-
# -*- mode: shell-script; indent-tabs-mode: t; sh-basic-offset: 8; sh-indentation: 8; sh-indent-for-case-alt: + -*-
2+
# -*- mode: shell-script; indent-tabs-mode: t; sh-basic-offset: 8; sh-indentation: 8; tab-width: 8 -*-
33

44
# This is a script to run the linpack benchmark
55

66
# TODO:
7-
# 1) write results in pbench standard file names and formats
8-
# 2) add support to run multiple samples and get stddev
9-
# 3) add support for multiple local or remote copies of benchmark running concurrently
10-
# 4) add support for binding copies of benchmark to numa nodes
7+
# 1) add support to run multiple samples with stddev
8+
# 2) add support for multiple local or remote copies of benchmark running concurrently
9+
# 3) add support for binding copies of benchmark to numa nodes
1110

12-
script_path=`dirname $0`
13-
script_name=`basename $0`
14-
pbench_bin="`cd ${script_path}/..; /bin/pwd`"
11+
script_path="$(dirname ${0})"
12+
script_name="$(basename ${0})"
13+
pbench_bin="$(realpath -e ${script_path}/..)"
1514

1615
# source the base script
17-
. "$pbench_bin"/base
18-
19-
benchmark_rpm=$script_name
20-
export benchmark="linpack"
21-
ver=11.1.3
22-
linpack_dir=/usr/local/${script_name}-${ver}/benchmarks/linpack
23-
linpack_cmd=xlinpack_xeon64
24-
linpack_dat=linpack.dat
25-
threads=`cat /proc/cpuinfo | grep processor | wc -l`
26-
16+
. "${pbench_bin}"/base
2717

2818
# Every bench-script follows a similar sequence:
2919
# 1) process bench script arguments
@@ -34,101 +24,167 @@ threads=`cat /proc/cpuinfo | grep processor | wc -l`
3424
# 6) postprocess benchmark data
3525
# 7) postprocess analysis tool data
3626

27+
export benchmark="linpack"
28+
3729
# Defaults
38-
baseconfig="`uname -r`"
39-
start_iteration_num=1
40-
orig_cmd="$*"
30+
def_threads=$(cat /proc/cpuinfo | grep processor | wc -l)
31+
threads=${def_threads}
32+
def_nr_samples=2
33+
nr_samples=${def_nr_samples}
34+
orig_cmd="${*}"
4135
tool_group="default"
4236
export config=""
4337
sysinfo="default"
4438

4539
function usage {
4640
printf "\tThe following options are available:\n\n"
4741
printf -- "\t-C str --config=str name of the test config\n"
48-
printf -- "\t --threads=int[,int] number of threads to use (default is num_cpus)\n"
42+
printf -- "\t --samples=<int> number of samples to use per test iteration (default is ${def_nr_samples})\n"
43+
printf -- "\t --threads=int[,int] number of threads to use (default is # local CPUs)\n"
4944
printf -- "\t --tool-group=str\n"
5045
printf -- "\t --sysinfo=str, str= comma separated values of sysinfo to be collected\n"
5146
printf -- "\t available: $(pbench-display-sysinfo-options)\n"
5247
}
5348

5449
# Process options and arguments
55-
opts=$(getopt -q -o C:h --longoptions "config:,threads:,tool-group:,sysinfo:,help" -n "getopt.sh" -- "$@");
56-
if [ $? -ne 0 ]; then
57-
printf -- "${script_name} $*\n"
50+
opts=$(getopt -q -o C:h --longoptions "config:,samples:,threads:,tool-group:,sysinfo:,help" -n "getopt.sh" -- "${@}")
51+
if [[ ${?} -ne 0 ]]; then
52+
printf -- "${script_name} ${*}\n"
5853
printf -- "\n"
5954
printf -- "\tunrecognized option specified\n\n"
6055
usage
6156
exit 1
6257
fi
63-
eval set -- "$opts";
58+
eval set -- "${opts}"
6459
while true; do
65-
case "$1" in
66-
-c|--config)
67-
shift;
68-
if [ -n "$1" ]; then
69-
config="$1"
60+
arg=${1}
61+
shift
62+
case "${arg}" in
63+
-c|--config)
64+
if [[ -n "${1}" ]]; then
65+
config="${1}"
7066
shift;
7167
fi
7268
;;
73-
--threads)
74-
shift;
75-
if [ -n "$1" ]; then
76-
threads="$1"
69+
--samples)
70+
if [[ -n "${1}" ]]; then
71+
nr_samples="${1}"
7772
shift;
7873
fi
7974
;;
80-
--tool-group)
81-
shift;
82-
if [ -n "$1" ]; then
83-
tool_group="$1"
75+
--threads)
76+
if [[ -n "${1}" ]]; then
77+
threads="${1}"
8478
shift;
8579
fi
8680
;;
87-
--sysinfo)
88-
shift;
89-
if [ -n "$1" ]; then
90-
sysinfo="$1"
81+
--tool-group)
82+
if [[ -n "${1}" ]]; then
83+
tool_group="${1}"
84+
shift;
85+
fi
86+
;;
87+
--sysinfo)
88+
if [[ -n "${1}" ]]; then
89+
sysinfo="${1}"
9190
shift;
9291
fi
9392
;;
94-
-h|--help)
93+
-h|--help)
9594
usage
9695
exit 0
9796
;;
98-
--)
97+
--)
9998
shift;
10099
break;
101100
;;
102-
*)
103-
echo "what happened? [$1]"
104-
exit 0
105-
break;
101+
*)
102+
echo "what happened? [${1}]" >&2
103+
exit 1
106104
;;
107105
esac
108106
done
109-
verify_common_bench_script_options $tool_group $sysinfo
107+
verify_common_bench_script_options ${tool_group} ${sysinfo}
108+
109+
ver="$(pbench-config version ${benchmark})"
110+
if [[ -z "${ver}" ]]; then
111+
error_log "${script_name}: package version is missing in config file"
112+
exit 1
113+
fi
114+
if [[ -z "${linpack_dir}" ]]; then
115+
linpack_dir="/usr/local/${script_name}-${ver}/benchmarks/linpack"
116+
linpack_dir_kind="default"
117+
else
118+
linpack_dir_kind="provided"
119+
fi
120+
if [[ ! -d "${linpack_dir}" ]]; then
121+
error_log "${script_name}: the ${linpack_dir_kind} linpack directory, ${linpack_dir}, does not exist"
122+
exit 1
123+
fi
124+
linpack_cmd="${linpack_dir}/xlinpack_xeon64"
125+
if [[ ! -x "${linpack_cmd}" ]]; then
126+
error_log "${script_name}: the expected linpack command, ${linpack_cmd}, does not exist"
127+
exit 1
128+
fi
110129

111-
## Ensure the right version of the benchmark is installed
112-
check_install_rpm "${benchmark_rpm}" "${ver}"
130+
function store_and_run {
131+
local workdir=${1}
132+
local cmd=${2}
133+
local filename=${3}
134+
(
135+
cd "${1}" &&
136+
echo "${cmd}" > "${filename}" &&
137+
chmod +x "${filename}" &&
138+
"./${filename}" &> "${filename}.out"
139+
)
140+
ret=${?}
141+
if [[ ${ret} -ne 0 ]]; then
142+
warn_log "failed to execute: ${workdir}/${filename}"
143+
fi
144+
return ${ret}
145+
}
146+
147+
function preprocess-iteration {
148+
local _cmd="${pbench_bin}/bench-scripts/postprocess/linpack-prepare-input-file"
149+
store_and_run "${1}" "'${_cmd}' --output-dir '${1}' --threads '${2}' --run-samples 1 --linpack-binary '${linpack_cmd}'" "${benchmark}-preprocess.cmd"
150+
}
151+
152+
function postprocess-iteration {
153+
local _cmd="${pbench_bin}/bench-scripts/postprocess/linpack-postprocess-cdm"
154+
store_and_run "${1}" "'${_cmd}' '${1}' '${2}' '${tool_group}' 1 html" \
155+
"${benchmark}-postprocess.cmd"
156+
}
157+
158+
function postprocess-results {
159+
local _cmd="${pbench_bin}/bench-scripts/postprocess/generate-benchmark-summary"
160+
store_and_run "${1}" "'${_cmd}' '${benchmark}' '${orig_cmd}' '${1}'" \
161+
"generate-benchmark-summary.cmd"
162+
}
113163

114164
benchmark_fullname="${benchmark}_${config}_${date_suffix}"
115-
export benchmark_run_dir="$pbench_run/${benchmark_fullname}"
116-
benchmark_summary_txt_file="$benchmark_run_dir/$benchmark-summary.txt"
117-
benchmark_summary_html_file="$benchmark_run_dir/$benchmark-summary.html"
165+
export benchmark_run_dir="${pbench_run}/${benchmark_fullname}"
166+
167+
# we'll record the iterations in this file
118168
benchmark_iterations="${benchmark_run_dir}/.iterations"
169+
mdlog=${benchmark_run_dir}/metadata.log
170+
171+
function record_iteration {
172+
local count=${1}
173+
local thread=${2}
174+
local iteration=${3}
175+
176+
echo ${iteration} >> ${benchmark_iterations}
177+
echo ${count} | pbench-add-metalog-option ${mdlog} iterations/${iteration} iteration_number
178+
echo ${thread} | pbench-add-metalog-option ${mdlog} iterations/${iteration} threads
179+
echo ${iteration} | pbench-add-metalog-option ${mdlog} iterations/${iteration} iteration_name
180+
}
181+
119182

120183
mkdir -p ${benchmark_run_dir}/.running
121184

122185
# now that the benchmark_run_dir directory exists, we can initialize the iterations file
123186
> ${benchmark_iterations}
124187

125-
iteration=1-$threads-threads
126-
sample="sample1"
127-
echo $iteration >> $benchmark_iterations
128-
benchmark_results_dir="$benchmark_run_dir/$iteration/${sample}"
129-
result_file=$benchmark_results_dir/result.txt
130-
mkdir -p $benchmark_results_dir
131-
132188
# Start the tool meisters on each registered local/remote host
133189
pbench-tool-meister-start --sysinfo="${sysinfo}" "${tool_group}"
134190
if [[ ${?} != 0 ]]; then
@@ -138,21 +194,52 @@ fi
138194

139195
trap "interrupt" INT QUIT TERM
140196

141-
## Run the benchmark and start/stop perf analysis tools
142-
pbench-start-tools --group=$tool_group --dir=$benchmark_results_dir
197+
let count=1
198+
for thread in ${threads//,/ }; do
199+
iteration="${count}-${thread}-threads"
200+
echo ${iteration} >> ${benchmark_iterations}
201+
iteration_dir="${benchmark_run_dir}/${iteration}"
202+
mkdir -p "${iteration_dir}"
203+
record_iteration ${count} ${thread} ${iteration}
204+
echo "Starting iteration ${iteration}"
205+
206+
# Pre-processing: generate the linpack.input dirs (in /var/lib/pbench-agent/tmp)
207+
preprocess-iteration "${iteration_dir}" "${thread}"
208+
209+
for sample in $(seq 1 ${nr_samples}); do
210+
echo "test sample ${sample} of ${nr_samples}"
211+
sample_dir="${iteration_dir}/sample${sample}"
212+
# Create the output directory expected by the postprocess script
213+
mkdir -p "${sample_dir}/clients/localhost/"
214+
215+
# Run the benchmark and start/stop perf analysis tools
216+
pbench-start-tools --group=${tool_group} --dir="${sample_dir}"
143217

144-
OMP_NUM_THREADS=$threads ${linpack_dir}/${linpack_cmd} < ${linpack_dir}/${linpack_dat} | tee $result_file
218+
( cd "${iteration_dir}" && ./linpack.sh )
145219

146-
pbench-stop-tools --group=$tool_group --dir=$benchmark_results_dir
147-
pbench-send-tools --group=$tool_group --dir=$benchmark_results_dir
148-
pbench-postprocess-tools --group=$tool_group --dir=$benchmark_results_dir
220+
pbench-stop-tools --group=${tool_group} --dir="${sample_dir}"
149221

150-
ln -s ${sample} $benchmark_run_dir/$iteration/reference-result
222+
# Move the results to the output directory expected by the postprocess script
223+
mv "${iteration_dir}/"linpack.{out,meta} "${sample_dir}/clients/localhost"
224+
225+
# Have the Tool Meisters send the tool data back and post-process it
226+
pbench-send-tools --group=${tool_group} --dir="${sample_dir}"
227+
pbench-postprocess-tools --group=${tool_group} --dir="${sample_dir}"
228+
229+
# Post-process the benchmark results after the tools so that the
230+
# post-processing has that data available to it.
231+
postprocess-iteration "${sample_dir}" "${benchmark_run_dir}"
232+
done
233+
234+
echo "Iteration ${iteration} complete"
235+
let count++
236+
done
237+
postprocess-results "${benchmark_run_dir}"
151238

152239
# Stop the tool meisters on each registered local/remote host
153240
pbench-tool-meister-stop --sysinfo="${sysinfo}" "${tool_group}"
154241
if [[ ${?} != 0 ]]; then
155242
error_log "[${script_name}]: failed to stop the tool meisters."
156243
fi
157244

158-
rmdir $benchmark_run_dir/.running
245+
rmdir ${benchmark_run_dir}/.running

0 commit comments

Comments
 (0)