
Commit 7e0eea2

Fixed self contained coordinator circular dependency import (#121)
* Added --docker-air-gap to builder and scc
* Add last_n to cli trigger tool
* Fixed circular dependency on scc
* Fixed self contained coordinator circular dependency import
1 parent 4e25e9c commit 7e0eea2

19 files changed (+949 −202 lines)

pyproject.toml

Lines changed: 1 addition & 2 deletions
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "redis-benchmarks-specification"
-version = "0.1.41"
+version = "0.1.50"
 description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
 authors = ["filipecosta90 <[email protected]>","Redis Performance Group <[email protected]>"]
 readme = "Readme.md"
@@ -19,7 +19,6 @@ docker = "^5.0.0"
 redisbench-admin = "^0.9.3"
 #redisbench-admin = {path = "../redisbench-admin", develop = true}
 psutil = "^5.8.0"
-tox-docker = "^3.1.0"
 PyGithub = "^1.55"
 GitPython = "^3.1.20"
 semver = "^2.13.0"

redis_benchmarks_specification/__builder__/builder.py

Lines changed: 43 additions & 30 deletions
@@ -381,35 +381,48 @@ def build_spec_image_prefetch(builders_folder, different_build_specs):
         build_config, id = get_build_config(builders_folder + "/" + build_spec)
         if build_config["kind"] == "docker":
             build_image = build_config["build_image"]
-            if build_image not in already_checked_images:
-                logging.info(
-                    "Build {} requirement: checking build image {} is available.".format(
-                        id, build_image
-                    )
-                )
-                local_images = [
-                    x.tags[0]
-                    for x in client.images.list(filters={"reference": build_image})
-                ]
-                if build_image not in local_images:
-                    logging.info(
-                        "Build {} requirement: build image {} is not available locally. Fetching it from hub".format(
-                            id, build_image
-                        )
-                    )
-                    client.images.pull(build_image)
-                    hub_pulled_images = hub_pulled_images + 1
-                else:
-                    logging.info(
-                        "Build {} requirement: build image {} is available locally.".format(
-                            id, build_image
-                        )
-                    )
-                already_checked_images.append(build_image)
-            else:
-                logging.info(
-                    "Build {} requirement: build image {} availability was already checked.".format(
-                        id, build_image
-                    )
+            hub_pulled_images = check_docker_image_available(
+                already_checked_images, build_image, client, hub_pulled_images, id
+            )
+            if "run_image" in build_config:
+                run_image = build_config["run_image"]
+                hub_pulled_images = check_docker_image_available(
+                    already_checked_images, run_image, client, hub_pulled_images, id
                 )
     return already_checked_images, hub_pulled_images
+
+
+def check_docker_image_available(
+    already_checked_images, build_image, client, hub_pulled_images, id
+):
+    if build_image not in already_checked_images:
+        logging.info(
+            "Build {} requirement: checking docker image {} is available.".format(
+                id, build_image
+            )
+        )
+        local_images = [
+            x.tags[0] for x in client.images.list(filters={"reference": build_image})
+        ]
+        if build_image not in local_images:
+            logging.info(
+                "Build {} requirement: docker image {} is not available locally. Fetching it from hub".format(
+                    id, build_image
+                )
+            )
+            client.images.pull(build_image)
+            hub_pulled_images = hub_pulled_images + 1
+        else:
+            logging.info(
+                "Build {} requirement: docker image {} is available locally.".format(
+                    id, build_image
+                )
+            )
+        already_checked_images.append(build_image)
+    else:
+        logging.info(
+            "Build {} requirement: docker image {} availability was already checked.".format(
+                id, build_image
+            )
+        )
+    return hub_pulled_images
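
The refactor folds the duplicated availability check into check_docker_image_available so the same logic covers both the build_image and, when present, the run_image. For reference, a minimal sketch of exercising the helper on its own (image name and build id are illustrative, and a running docker daemon is assumed):

import logging

import docker

from redis_benchmarks_specification.__builder__.builder import (
    check_docker_image_available,
)

logging.basicConfig(level=logging.INFO)
client = docker.from_env()
already_checked_images = []
hub_pulled_images = 0

# first call pulls "ubuntu:20.04" from the hub if it is not cached locally
hub_pulled_images = check_docker_image_available(
    already_checked_images, "ubuntu:20.04", client, hub_pulled_images, "build-spec-1"
)
# second call short-circuits: the image is already in already_checked_images
hub_pulled_images = check_docker_image_available(
    already_checked_images, "ubuntu:20.04", client, hub_pulled_images, "build-spec-1"
)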

redis_benchmarks_specification/__cli__/args.py

Lines changed: 6 additions & 0 deletions
@@ -63,4 +63,10 @@ def spec_cli_args(parser):
         action="store_true",
         help="Only check how many benchmarks we would trigger. Don't request benchmark runs at the end.",
     )
+    parser.add_argument(
+        "--last_n",
+        type=int,
+        default=-1,
+        help="Use the last N samples. by default will use all available values",
+    )
     return parser
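
A minimal sketch of how the new flag behaves in isolation (standalone argparse parser, values illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--last_n", type=int, default=-1)

# limit a run to the most recent 5 commits
assert parser.parse_args(["--last_n", "5"]).last_n == 5
# when omitted, the default of -1 means "use all available commits"
assert parser.parse_args([]).last_n == -1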

redis_benchmarks_specification/__cli__/cli.py

Lines changed: 13 additions & 3 deletions
@@ -89,6 +89,7 @@ def cli_command_logic(args, project_name, project_version):
     )
     repo = git.Repo(redisDirPath)
     commits = []
+    total_commits = 0
     if args.use_branch:
         for commit in repo.iter_commits():
             if (
@@ -98,8 +99,17 @@ def cli_command_logic(args, project_name, project_version):
                 )
                 <= args.to_date
             ):
-                print(commit.summary)
-                commits.append({"git_hash": commit.hexsha, "git_branch": args.branch})
+                if (
+                    args.last_n > 0 and total_commits < args.last_n
+                ) or args.last_n == -1:
+                    total_commits = total_commits + 1
+                    print(commit.summary)
+                    commits.append(
+                        {
+                            "git_hash": commit.hexsha,
+                            "git_branch": repo.active_branch.name,
+                        }
+                    )
     if args.use_tags:
         tags_regexp = args.tags_regexp
         if tags_regexp == ".*":
@@ -150,7 +160,7 @@ def cli_command_logic(args, project_name, project_version):
         pass
     by_description = "n/a"
     if args.use_branch:
-        by_description = "from branch {}".format(args.branch)
+        by_description = "from branch {}".format(repo.active_branch.name)
     if args.use_tags:
         by_description = "by tags"
     logging.info(
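
The commit-collection loop above admits a commit while fewer than last_n commits have been kept, or unconditionally when last_n is -1. The same predicate in isolation (hypothetical commit hashes):

def keep_commit(last_n, total_commits):
    # mirrors the condition used inside cli_command_logic
    return (last_n > 0 and total_commits < last_n) or last_n == -1

collected = []
for sha in ["a1b2c3", "d4e5f6", "0a1b2c", "3d4e5f"]:  # hypothetical hashes
    if keep_commit(2, len(collected)):
        collected.append(sha)
assert collected == ["a1b2c3", "d4e5f6"]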

redis_benchmarks_specification/__self_contained_coordinator__/args.py

Lines changed: 6 additions & 0 deletions
@@ -116,4 +116,10 @@ def create_self_contained_coordinator_args(project_name):
         action="store_true",
         help="Read the docker images from redis keys.",
     )
+    parser.add_argument(
+        "--verbose",
+        default=False,
+        action="store_true",
+        help="Run in verbose mode.",
+    )
     return parser
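
How the coordinator consumes --verbose is not shown in this diff; a common pattern (purely illustrative, not taken from this commit) is to map it to the logging level:

import logging

def setup_logging(verbose):
    # illustrative only: raise verbosity to DEBUG when --verbose is passed
    logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
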
Lines changed: 57 additions & 0 deletions
@@ -0,0 +1,57 @@
+import json
+import logging
+
+
+def extract_build_info_from_streamdata(testDetails):
+    use_git_timestamp = False
+    git_timestamp_ms = None
+    git_version = None
+    git_branch = None
+    metadata = None
+    build_variant_name = None
+    fields = [fieldname.decode() for fieldname in testDetails.keys()]
+    logging.info("Fields on stream {}".format(fields))
+    git_hash = testDetails[b"git_hash"]
+    if b"use_git_timestamp" in testDetails:
+        use_git_timestamp = bool(testDetails[b"use_git_timestamp"].decode())
+    if b"git_timestamp_ms" in testDetails:
+        git_timestamp_ms = int(testDetails[b"git_timestamp_ms"].decode())
+    if b"id" in testDetails:
+        build_variant_name = testDetails[b"id"]
+        if type(build_variant_name) == bytes:
+            build_variant_name = build_variant_name.decode()
+    if b"git_branch" in testDetails:
+        git_branch = testDetails[b"git_branch"]
+        if type(git_branch) == bytes:
+            git_branch = git_branch.decode()
+    if b"git_version" in testDetails:
+        git_version = testDetails[b"git_version"]
+        if type(git_version) == bytes:
+            git_version = git_version.decode()
+    if type(git_hash) == bytes:
+        git_hash = git_hash.decode()
+    logging.info("Received commit hash specifier {}.".format(git_hash))
+    build_artifacts_str = "redis-server"
+    build_image = testDetails[b"build_image"].decode()
+    run_image = build_image
+    if b"run_image" in testDetails:
+        run_image = testDetails[b"run_image"].decode()
+        logging.info("detected run image info {}.".format(run_image))
+    else:
+        logging.info("using build image info {}.".format(build_image))
+    if b"build_artifacts" in testDetails:
+        build_artifacts_str = testDetails[b"build_artifacts"].decode()
+    build_artifacts = build_artifacts_str.split(",")
+    if b"metadata" in testDetails:
+        metadata = json.loads(testDetails[b"metadata"].decode())
+    return (
+        build_variant_name,
+        metadata,
+        build_artifacts,
+        git_hash,
+        git_branch,
+        git_version,
+        run_image,
+        use_git_timestamp,
+        git_timestamp_ms,
+    )
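
This new helper decodes a build event read from the Redis stream, where field names and values arrive as bytes. A minimal sketch with an illustrative payload (the module's import path is not visible in this view, so the function is assumed to be in scope):

testDetails = {
    b"git_hash": b"7e0eea2",
    b"git_branch": b"unstable",
    b"build_image": b"gcc:10.3.0-buster",
    b"run_image": b"ubuntu:20.04",
    b"build_artifacts": b"redis-server,redis-cli",
}

(
    build_variant_name,
    metadata,
    build_artifacts,
    git_hash,
    git_branch,
    git_version,
    run_image,
    use_git_timestamp,
    git_timestamp_ms,
) = extract_build_info_from_streamdata(testDetails)

assert git_hash == "7e0eea2" and git_branch == "unstable"
assert run_image == "ubuntu:20.04"  # falls back to build_image when absent
assert build_artifacts == ["redis-server", "redis-cli"]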
Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+def prepare_memtier_benchmark_parameters(
+    clientconfig,
+    full_benchmark_path,
+    port,
+    server,
+    local_benchmark_output_filename,
+    oss_cluster_api_enabled,
+):
+    benchmark_command = [
+        full_benchmark_path,
+        "--port",
+        "{}".format(port),
+        "--server",
+        "{}".format(server),
+        "--json-out-file",
+        local_benchmark_output_filename,
+    ]
+    if oss_cluster_api_enabled is True:
+        benchmark_command.append("--cluster-mode")
+    benchmark_command_str = " ".join(benchmark_command)
+    if "arguments" in clientconfig:
+        benchmark_command_str = benchmark_command_str + " " + clientconfig["arguments"]
+
+    return None, benchmark_command_str
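
This helper assembles the memtier_benchmark command line from the client configuration of a test spec. A small sketch with illustrative inputs (again assuming the function is imported from the new module, whose path is not shown here):

clientconfig = {"arguments": "--test-time 30 -c 10 -t 2"}  # illustrative memtier args

_, cmd = prepare_memtier_benchmark_parameters(
    clientconfig,
    "memtier_benchmark",
    6379,
    "localhost",
    "benchmark-output.json",
    False,
)
assert cmd == (
    "memtier_benchmark --port 6379 --server localhost"
    " --json-out-file benchmark-output.json --test-time 30 -c 10 -t 2"
)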
Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+import math
+
+
+def generate_cpuset_cpus(ceil_db_cpu_limit, current_cpu_pos):
+    previous_cpu_pos = current_cpu_pos
+    current_cpu_pos = current_cpu_pos + int(ceil_db_cpu_limit)
+    db_cpuset_cpus = ",".join(
+        [str(x) for x in range(previous_cpu_pos, current_cpu_pos)]
+    )
+    return db_cpuset_cpus, current_cpu_pos
+
+
+def extract_db_cpu_limit(topologies_map, topology_spec_name):
+    topology_spec = topologies_map[topology_spec_name]
+    db_cpu_limit = topology_spec["resources"]["requests"]["cpus"]
+    ceil_db_cpu_limit = math.ceil(float(db_cpu_limit))
+    return ceil_db_cpu_limit
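
extract_db_cpu_limit rounds the topology's requested cpus up to whole cores, and generate_cpuset_cpus turns that budget into a docker cpuset string while advancing the running CPU cursor. A sketch with an illustrative topology spec:

from redis_benchmarks_specification.__self_contained_coordinator__.cpuset import (
    extract_db_cpu_limit,
    generate_cpuset_cpus,
)

topologies_map = {
    "oss-standalone": {"resources": {"requests": {"cpus": "1.5"}}}  # illustrative spec
}
ceil_db_cpu_limit = extract_db_cpu_limit(topologies_map, "oss-standalone")
assert ceil_db_cpu_limit == 2

db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(ceil_db_cpu_limit, 0)
assert db_cpuset_cpus == "0,1" and current_cpu_pos == 2
# a second database started afterwards would be pinned to cpus "2,3", and so on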
Lines changed: 90 additions & 0 deletions
@@ -0,0 +1,90 @@
+import logging
+
+import docker
+
+from redis_benchmarks_specification.__self_contained_coordinator__.cpuset import (
+    generate_cpuset_cpus,
+)
+
+
+def generate_standalone_redis_server_args(
+    binary, port, dbdir, configuration_parameters=None
+):
+    added_params = ["port", "protected-mode", "dir"]
+    # start redis-server
+    command = [
+        binary,
+        "--protected-mode",
+        "no",
+        "--port",
+        "{}".format(port),
+        "--dir",
+        dbdir,
+    ]
+    if configuration_parameters is not None:
+        for parameter, parameter_value in configuration_parameters.items():
+            if parameter not in added_params:
+                command.extend(
+                    [
+                        "--{}".format(parameter),
+                        parameter_value,
+                    ]
+                )
+    return command
+
+
+def teardown_containers(redis_containers, container_type):
+    for container in redis_containers:
+        try:
+            container.stop()
+        except docker.errors.NotFound:
+            logging.info(
+                "When trying to stop {} container with id {} and image {} it was already stopped".format(
+                    container_type, container.id, container.image
+                )
+            )
+            pass
+
+
+def spin_docker_standalone_redis(
+    ceil_db_cpu_limit,
+    current_cpu_pos,
+    docker_client,
+    redis_configuration_parameters,
+    redis_containers,
+    redis_proc_start_port,
+    run_image,
+    temporary_dir,
+):
+    mnt_point = "/mnt/redis/"
+    command = generate_standalone_redis_server_args(
+        "{}redis-server".format(mnt_point),
+        redis_proc_start_port,
+        mnt_point,
+        redis_configuration_parameters,
+    )
+    command_str = " ".join(command)
+    db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(
+        ceil_db_cpu_limit, current_cpu_pos
+    )
+    logging.info(
+        "Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
+            run_image, db_cpuset_cpus, command_str
+        )
+    )
+    container = docker_client.containers.run(
+        image=run_image,
+        volumes={
+            temporary_dir: {"bind": mnt_point, "mode": "rw"},
+        },
+        auto_remove=True,
+        privileged=True,
+        working_dir=mnt_point,
+        command=command_str,
+        network_mode="host",
+        detach=True,
+        cpuset_cpus=db_cpuset_cpus,
+        pid_mode="host",
+    )
+    redis_containers.append(container)
+    return current_cpu_pos
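
generate_standalone_redis_server_args builds the redis-server argument list and skips any configuration parameter that would collide with the port/protected-mode/dir flags it already sets; spin_docker_standalone_redis then runs that command inside the run image, pinned to the computed cpuset. A sketch of the argument builder with illustrative parameters (the new module's import path is not shown in this view, so the function is assumed to be in scope):

command = generate_standalone_redis_server_args(
    "/mnt/redis/redis-server",
    6379,
    "/mnt/redis/",
    {"maxmemory": "256mb", "port": "7000"},  # "port" collides and is ignored
)
assert command == [
    "/mnt/redis/redis-server",
    "--protected-mode",
    "no",
    "--port",
    "6379",
    "--dir",
    "/mnt/redis/",
    "--maxmemory",
    "256mb",
]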
