Skip to content

Commit c07f6b8

Browse files
[fix] Local oss-cluster runner/profiler now run as expected. [add] Print a summary results table at the end of each benchmark (#232)
1 parent 2736176 commit c07f6b8

File tree

12 files changed

+356
-108
lines changed

12 files changed

+356
-108
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "redisbench-admin"
3-
version = "0.5.10"
3+
version = "0.5.11"
44
description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
55
authors = ["filipecosta90 <[email protected]>","Redis Performance Group <[email protected]>"]
66
readme = "README.md"

redisbench_admin/environments/oss_cluster.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def spin_up_local_redis_cluster(
2929
shard_port = master_shard_id + start_port - 1
3030

3131
command = generate_cluster_redis_server_args(
32-
dbdir, local_module_file, ip, shard_port, configuration_parameters
32+
dbdir, local_module_file, ip, shard_port, configuration_parameters, "no"
3333
)
3434

3535
logging.info(
@@ -41,7 +41,7 @@ def spin_up_local_redis_cluster(
4141
r = redis.StrictRedis(port=shard_port)
4242
result = wait_for_conn(r, dataset_load_timeout_secs)
4343
if result is True:
44-
logging.info("Redis available")
44+
logging.info("Redis available. pid={}".format(redis_process.pid))
4545
redis_conns.append(r)
4646
redis_processes.append(redis_process)
4747
return redis_processes, redis_conns
@@ -131,6 +131,7 @@ def generate_cluster_redis_server_args(
131131
ip,
132132
port,
133133
configuration_parameters=None,
134+
daemonize="yes",
134135
):
135136
# start redis-server
136137
command = [
@@ -142,7 +143,7 @@ def generate_cluster_redis_server_args(
142143
"--cluster-enabled",
143144
"yes",
144145
"--daemonize",
145-
"yes",
146+
daemonize,
146147
"--dbfilename",
147148
get_cluster_dbfilename(port),
148149
"--protected-mode",

redisbench_admin/profilers/perf.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -170,8 +170,10 @@ def _is_alive(self, process):
170170
return False
171171
# Check if child process has terminated. Set and return returncode
172172
# attribute
173-
if process.poll() is None:
173+
ret = process.poll()
174+
if ret is None:
174175
return True
176+
self.profiler_process_exit_code = ret
175177
return False
176178

177179
def stop_profile(self, **kwargs):
@@ -182,8 +184,16 @@ def stop_profile(self, **kwargs):
182184
self.profile_end_time = time.time()
183185
if not self._is_alive(self.profiler_process):
184186
logging.error(
185-
"Profiler process is not alive, might have crash during test execution, "
187+
"Profiler process is not alive, might have crash during test execution. Exit code: {}".format(
188+
self.profiler_process_exit_code
189+
)
186190
)
191+
(
192+
self.profiler_process_stdout,
193+
self.profiler_process_stderr,
194+
) = self.profiler_process.communicate()
195+
logging.error("Profiler stderr: {}".format(self.profiler_process_stderr))
196+
logging.error("Profiler stdout: {}".format(self.profiler_process_stdout))
187197
return result
188198
try:
189199
self.profiler_process.terminate()

redisbench_admin/run/args.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
DEFAULT_TRIGGERING_ENV = socket.gethostname()
2323
TRIGGERING_ENV = os.getenv("TRIGGERING_ENV", DEFAULT_TRIGGERING_ENV)
2424
ENV = os.getenv("ENV", "oss-standalone,oss-cluster")
25+
SETUP = os.getenv("SETUP", "")
2526
PUSH_S3 = bool(os.getenv("PUSH_S3", False))
2627
PROFILERS_DSO = os.getenv("PROFILERS_DSO", None)
2728
PROFILERS_ENABLED = bool(os.getenv("PROFILE", 0))
@@ -121,6 +122,12 @@ def common_run_args(parser):
121122
"--allowed-envs",
122123
type=str,
123124
default=ENV,
124-
help="Comma delimited allowed setups: 'oss-standalone','oss-cluster'",
125+
help="Comma delimited allowed topologies: 'oss-standalone','oss-cluster'",
126+
)
127+
parser.add_argument(
128+
"--allowed-setups",
129+
type=str,
130+
default=SETUP,
131+
help="Comma delimited allowed setups. By default all setups are allowed.",
125132
)
126133
return parser

redisbench_admin/run/common.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
import os
1111
import time
1212
import redis
13+
from pytablewriter import MarkdownTableWriter
1314

1415
from redisbench_admin.run.aibench_run_inference_redisai_vision.aibench_run_inference_redisai_vision import (
1516
prepare_aibench_benchmark_command,
@@ -18,6 +19,7 @@
1819
from redisbench_admin.run.memtier_benchmark.memtier_benchmark import (
1920
prepare_memtier_benchmark_command,
2021
)
22+
from redisbench_admin.run.metrics import extract_results_table
2123
from redisbench_admin.run.redis_benchmark.redis_benchmark import (
2224
prepare_redis_benchmark_command,
2325
)
@@ -570,3 +572,27 @@ def common_properties_log(
570572
logging.info("\tgithub_sha: {}".format(tf_github_sha))
571573
logging.info("\ttriggering env: {}".format(tf_triggering_env))
572574
logging.info("\tsetup_name sufix: {}".format(tf_setup_name_sufix))
575+
576+
577+
def print_results_table_stdout(
    benchmark_config, default_metrics, results_dict, setup_name, test_name
):
    """Print a Markdown table of benchmark metrics to stdout.

    Merges the default metrics with any metrics declared in the benchmark
    config, extracts each metric value from results_dict, and renders a
    two-column table (metric JSON path, metric value) via MarkdownTableWriter.
    """
    # Resolve which metric JSON paths to report (defaults merged with the
    # per-benchmark configuration; no exporter section is needed here).
    (_, metrics,) = merge_default_and_config_metrics(
        benchmark_config,
        default_metrics,
        None,
    )
    extracted_rows = extract_results_table(metrics, results_dict)
    # Keep only the jsonpath (col 0) and the numeric value (col 3),
    # formatted to three decimal places.
    value_matrix = [[row[0], "{:.3f}".format(row[3])] for row in extracted_rows]
    writer = MarkdownTableWriter(
        table_name="Results for {} test-case on {} topology".format(
            test_name, setup_name
        ),
        headers=[
            "Metric JSON Path",
            "Metric Value",
        ],
        value_matrix=value_matrix,
    )
    writer.write_table()

redisbench_admin/run/metrics.py

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
# BSD 3-Clause License
2+
#
3+
# Copyright (c) 2021., Redis Labs Modules
4+
# All rights reserved.
5+
#
6+
import logging
7+
8+
from jsonpath_ng import parse
9+
10+
11+
def extract_results_table(
    metrics,
    results_dict,
):
    """Extract a matrix of metric values from a benchmark results dict.

    :param metrics: iterable of metric specs. Each spec is either a jsonpath
        string, or a single-key dict mapping a jsonpath to a dict of
        per-test-case targets. Dict specs take precedence over string specs
        for the same jsonpath.
    :param results_dict: the parsed benchmark results (JSON-like dict) the
        jsonpaths are evaluated against.
    :return: list of rows, each
        [metric_jsonpath, metric_context_path, metric_name, metric_value,
         test_case_targets_dict, use_metric_context_path].
    """
    results_matrix = []
    cleaned_metrics = []
    already_present_metrics = []
    # Insert the dict-form metrics first so their target info wins over a
    # plain string spec for the same jsonpath.
    for jsonpath in metrics:
        if isinstance(jsonpath, dict):
            cleaned_metrics.append(jsonpath)
            metric_jsonpath = list(jsonpath.keys())[0]
            already_present_metrics.append(metric_jsonpath)
    # Then the string-form metrics, skipping duplicates.
    for jsonpath in metrics:
        if isinstance(jsonpath, str) and jsonpath not in already_present_metrics:
            already_present_metrics.append(jsonpath)
            cleaned_metrics.append(jsonpath)

    for jsonpath in cleaned_metrics:
        test_case_targets_dict = {}
        metric_jsonpath = jsonpath
        find_res = None
        try:
            if isinstance(jsonpath, dict):
                metric_jsonpath = list(jsonpath.keys())[0]
                test_case_targets_dict = jsonpath[metric_jsonpath]
            jsonpath_expr = parse(metric_jsonpath)
            find_res = jsonpath_expr.find(results_dict)
        except Exception as e:
            # A single bad path must not abort the whole table; surface the
            # error instead of silently swallowing it.
            logging.warning(
                "Error while extracting metric path {}: {}".format(jsonpath, e)
            )
        if find_res is not None:
            # When a jsonpath matches multiple entries, disambiguate the
            # metric name with its context path.
            use_metric_context_path = len(find_res) > 1
            # Strip the leading "$"/"." once — it is invariant across matches.
            if metric_jsonpath[0] == "$":
                metric_jsonpath = metric_jsonpath[1:]
            if metric_jsonpath[0] == ".":
                metric_jsonpath = metric_jsonpath[1:]
            for metric in find_res:
                metric_name = str(metric.path)
                metric_value = float(metric.value)
                metric_context_path = str(metric.context.path)

                # retro-compatible naming
                if use_metric_context_path is False:
                    metric_name = metric_jsonpath

                # Sanitize the name: drop quotes/parentheses, spaces -> "_".
                metric_name = metric_name.replace("'", "")
                metric_name = metric_name.replace('"', "")
                metric_name = metric_name.replace("(", "")
                metric_name = metric_name.replace(")", "")
                metric_name = metric_name.replace(" ", "_")

                results_matrix.append(
                    [
                        metric_jsonpath,
                        metric_context_path,
                        metric_name,
                        metric_value,
                        test_case_targets_dict,
                        use_metric_context_path,
                    ]
                )
        else:
            logging.warning(
                "Unable to find metric path {} in result dict".format(jsonpath)
            )
    return results_matrix

redisbench_admin/run_local/local_db.py

Lines changed: 66 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,16 +5,28 @@
55
#
66
import logging
77
import tempfile
8+
import datetime
89

910
import redis
1011

12+
from redisbench_admin.run.run import calculate_client_tool_duration_and_check
13+
from redisbench_admin.run_local.local_helpers import (
14+
check_benchmark_binaries_local_requirements,
15+
run_local_benchmark,
16+
)
17+
1118
from redisbench_admin.environments.oss_cluster import (
1219
spin_up_local_redis_cluster,
1320
setup_redis_cluster_from_conns,
1421
)
1522
from redisbench_admin.environments.oss_standalone import spin_up_local_redis
1623
from redisbench_admin.run.cluster import cluster_init_steps
17-
from redisbench_admin.run.common import run_redis_pre_steps
24+
from redisbench_admin.run.common import (
25+
run_redis_pre_steps,
26+
check_dbconfig_tool_requirement,
27+
prepare_benchmark_parameters,
28+
dbconfig_keyspacelen_check,
29+
)
1830
from redisbench_admin.utils.benchmark_config import extract_redis_dbconfig_parameters
1931
from redisbench_admin.utils.local import (
2032
check_dataset_local_requirements,
@@ -101,15 +113,64 @@ def local_db_spin(
101113
dataset_load_timeout_secs,
102114
)
103115

104-
for redis_process in redis_processes:
105-
if is_process_alive(redis_process) is False:
106-
raise Exception("Redis process is not alive. Failing test.")
107-
108116
r = redis.StrictRedis(port=args.port)
109117
redis_conns.append(r)
118+
119+
for shardn, redis_process in enumerate(redis_processes):
120+
logging.info(
121+
"Checking if shard #{} process with pid={} is alive".format(
122+
shardn + 1, redis_process.pid
123+
)
124+
)
125+
if is_process_alive(redis_process) is False:
126+
raise Exception("Redis process is not alive. Failing test.")
127+
110128
if setup_type == "oss-cluster":
111129

112130
cluster_init_steps(clusterconfig, redis_conns, local_module_file)
113131

132+
if check_dbconfig_tool_requirement(benchmark_config):
133+
logging.info("Detected the requirements to load data via client tool")
134+
local_benchmark_output_filename = "{}/load-data.txt".format(temporary_dir)
135+
(
136+
benchmark_tool,
137+
full_benchmark_path,
138+
benchmark_tool_workdir,
139+
) = check_benchmark_binaries_local_requirements(
140+
benchmark_config, args.allowed_tools, "./binaries", "dbconfig"
141+
)
142+
143+
# prepare the benchmark command
144+
command, command_str = prepare_benchmark_parameters(
145+
benchmark_config,
146+
full_benchmark_path,
147+
args.port,
148+
"localhost",
149+
local_benchmark_output_filename,
150+
False,
151+
benchmark_tool_workdir,
152+
cluster_api_enabled,
153+
"dbconfig",
154+
)
155+
156+
# run the benchmark
157+
load_via_benchmark_start_time = datetime.datetime.now()
158+
run_local_benchmark(benchmark_tool, command)
159+
load_via_benchmark_end_time = datetime.datetime.now()
160+
load_via_benchmark_duration_seconds = calculate_client_tool_duration_and_check(
161+
load_via_benchmark_end_time, load_via_benchmark_start_time
162+
)
163+
logging.info(
164+
"Loading data via benchmark tool took {} secs.".format(
165+
load_via_benchmark_duration_seconds
166+
)
167+
)
168+
169+
dbconfig_keyspacelen_check(
170+
benchmark_config,
171+
redis_conns,
172+
)
173+
114174
run_redis_pre_steps(benchmark_config, redis_conns[0], required_modules)
175+
115176
return cluster_api_enabled, redis_conns, redis_processes

redisbench_admin/run_local/local_helpers.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,10 @@ def run_local_benchmark(benchmark_tool, command):
2929

3030

3131
def check_benchmark_binaries_local_requirements(
32-
benchmark_config, allowed_tools, binaries_localtemp_dir="./binaries"
32+
benchmark_config,
33+
allowed_tools,
34+
binaries_localtemp_dir="./binaries",
35+
config_key="clientconfig",
3336
):
3437
(
3538
benchmark_min_tool_version,
@@ -40,7 +43,7 @@ def check_benchmark_binaries_local_requirements(
4043
tool_source,
4144
tool_source_bin_path,
4245
_,
43-
) = extract_benchmark_tool_settings(benchmark_config)
46+
) = extract_benchmark_tool_settings(benchmark_config, config_key)
4447
which_benchmark_tool = None
4548
if benchmark_tool is not None:
4649
logging.info("Detected benchmark config tool {}".format(benchmark_tool))

redisbench_admin/run_local/run_local.py

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
BENCHMARK_REPETITIONS,
2020
get_setup_type_and_primaries_count,
2121
dso_check,
22+
print_results_table_stdout,
2223
)
2324
from redisbench_admin.run.redistimeseries import datasink_profile_tabular_data
2425
from redisbench_admin.run.run import (
@@ -108,7 +109,7 @@ def run_local_command_logic(args, project_name, project_version):
108109

109110
(
110111
benchmark_definitions,
111-
_,
112+
default_metrics,
112113
_,
113114
default_specs,
114115
clusterconfig,
@@ -141,6 +142,15 @@ def run_local_command_logic(args, project_name, project_version):
141142
setup_type,
142143
shard_count,
143144
) = get_setup_type_and_primaries_count(setup_settings)
145+
if args.allowed_setups != "":
146+
allowed_setups = args.allowed_setups.split()
147+
if setup_name not in allowed_setups:
148+
logging.warning(
149+
"SKIPPING setup named {} of topology type {}.".format(
150+
setup_name, setup_type
151+
)
152+
)
153+
continue
144154
if setup_type in args.allowed_envs:
145155
redis_processes = []
146156
# after we've spinned Redis, even on error we should always teardown
@@ -314,6 +324,13 @@ def run_local_command_logic(args, project_name, project_version):
314324
local_benchmark_output_filename, "r"
315325
) as json_file:
316326
results_dict = json.load(json_file)
327+
print_results_table_stdout(
328+
benchmark_config,
329+
default_metrics,
330+
results_dict,
331+
setup_name,
332+
test_name,
333+
)
317334

318335
# check KPIs
319336
return_code = results_dict_kpi_check(

0 commit comments

Comments
 (0)