Skip to content

Commit 9fc6dd0

Browse files
committed
fix(tests): Use architecture-specific streams in coordinator tests
Fix 7 failing tests in test_self_contained_coordinator_memtier.py by:
- Adding messages to arch-specific streams instead of base stream
- Fixing consumer group creation parameters (arch and id)
- Updating assertions to check arch-specific streams

This aligns tests with the arch-specific stream routing implemented in the coordinator, which reads from streams like:
- oss:api:gh/redis/redis/builds:amd64 (for amd64)
- oss:api:gh/redis/redis/builds:arm64 (for arm64)

Fixes:
- test_self_contained_coordinator_dockerhub_preload
- test_self_contained_coordinator_dockerhub
- test_self_contained_coordinator_dockerhub_iothreads
- test_self_contained_coordinator_dockerhub_valkey
- test_dockerhub_via_cli
- test_dockerhub_via_cli_airgap
- test_self_contained_coordinator_duplicated_ts
1 parent 2ae2ae4 commit 9fc6dd0

File tree

1 file changed

+42
-26
lines changed

1 file changed

+42
-26
lines changed

utils/tests/test_self_contained_coordinator_memtier.py

Lines changed: 42 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -20,6 +20,7 @@
2020
)
2121
from redis_benchmarks_specification.__common__.env import (
2222
STREAM_KEYNAME_NEW_BUILD_EVENTS,
23+
get_arch_specific_stream_name,
2324
)
2425
from redis_benchmarks_specification.__common__.spec import (
2526
extract_client_tool,
@@ -252,8 +253,9 @@ def test_self_contained_coordinator_dockerhub_preload():
252253
)
253254
build_stream_fields["mnt_point"] = ""
254255
if result is True:
256+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
255257
benchmark_stream_id = conn.xadd(
256-
STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
258+
arch_specific_stream, build_stream_fields
257259
)
258260
logging.info(
259261
"sucessfully requested a new run {}. Stream id: {}".format(
@@ -264,11 +266,12 @@ def test_self_contained_coordinator_dockerhub_preload():
264266
build_variant_name = "gcc:15.2.0-amd64-debian-bookworm-default"
265267
expected_datapoint_ts = None
266268

267-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
268-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
269+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
270+
assert conn.exists(arch_specific_stream)
271+
assert conn.xlen(arch_specific_stream) > 0
269272
running_platform = "fco-ThinkPad-T490"
270273

271-
build_runners_consumer_group_create(conn, running_platform, "0")
274+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
272275
datasink_conn = redis.StrictRedis(port=db_port)
273276
docker_client = docker.from_env()
274277
home = str(Path.home())
@@ -378,8 +381,9 @@ def test_self_contained_coordinator_dockerhub():
378381
)
379382
build_stream_fields["mnt_point"] = ""
380383
if result is True:
384+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
381385
benchmark_stream_id = conn.xadd(
382-
STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
386+
arch_specific_stream, build_stream_fields
383387
)
384388
logging.info(
385389
"sucessfully requested a new run {}. Stream id: {}".format(
@@ -390,11 +394,12 @@ def test_self_contained_coordinator_dockerhub():
390394
build_variant_name = "gcc:15.2.0-amd64-debian-bookworm-default"
391395
expected_datapoint_ts = None
392396

393-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
394-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
397+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
398+
assert conn.exists(arch_specific_stream)
399+
assert conn.xlen(arch_specific_stream) > 0
395400
running_platform = "fco-ThinkPad-T490"
396401

397-
build_runners_consumer_group_create(conn, running_platform, "0")
402+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
398403
datasink_conn = redis.StrictRedis(port=db_port)
399404
docker_client = docker.from_env()
400405
home = str(Path.home())
@@ -504,8 +509,9 @@ def test_self_contained_coordinator_dockerhub_iothreads():
504509
)
505510
build_stream_fields["mnt_point"] = ""
506511
if result is True:
512+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
507513
benchmark_stream_id = conn.xadd(
508-
STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
514+
arch_specific_stream, build_stream_fields
509515
)
510516
logging.info(
511517
"sucessfully requested a new run {}. Stream id: {}".format(
@@ -516,11 +522,12 @@ def test_self_contained_coordinator_dockerhub_iothreads():
516522
build_variant_name = "gcc:15.2.0-amd64-debian-bookworm-default"
517523
expected_datapoint_ts = None
518524

519-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
520-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
525+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
526+
assert conn.exists(arch_specific_stream)
527+
assert conn.xlen(arch_specific_stream) > 0
521528
running_platform = "fco-ThinkPad-T490"
522529

523-
build_runners_consumer_group_create(conn, running_platform, "0")
530+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
524531
datasink_conn = redis.StrictRedis(port=db_port)
525532
docker_client = docker.from_env()
526533
home = str(Path.home())
@@ -641,20 +648,22 @@ def test_self_contained_coordinator_dockerhub_valkey():
641648
f"requesting stream with following info: {build_stream_fields}"
642649
)
643650
if result is True:
651+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
644652
benchmark_stream_id = conn.xadd(
645-
STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
653+
arch_specific_stream, build_stream_fields
646654
)
647655
logging.info(
648656
"sucessfully requested a new run {}. Stream id: {}".format(
649657
build_stream_fields, benchmark_stream_id
650658
)
651659
)
652660

653-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
654-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
661+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
662+
assert conn.exists(arch_specific_stream)
663+
assert conn.xlen(arch_specific_stream) > 0
655664
running_platform = "fco-ThinkPad-T490"
656665

657-
build_runners_consumer_group_create(conn, running_platform, "0")
666+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
658667
datasink_conn = redis.StrictRedis(port=db_port)
659668
docker_client = docker.from_env()
660669
home = str(Path.home())
@@ -783,11 +792,14 @@ def test_dockerhub_via_cli():
783792
assert e.code == 0
784793

785794
# confirm request was made via the cli
786-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
787-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
795+
# CLI adds to arch-specific stream (defaults to amd64)
796+
build_arch = "amd64"
797+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
798+
assert conn.exists(arch_specific_stream)
799+
assert conn.xlen(arch_specific_stream) > 0
788800
running_platform = "fco-ThinkPad-T490"
789801

790-
build_runners_consumer_group_create(conn, running_platform, "0")
802+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
791803
datasink_conn = redis.StrictRedis(port=db_port)
792804
docker_client = docker.from_env()
793805
home = str(Path.home())
@@ -912,11 +924,14 @@ def test_dockerhub_via_cli_airgap():
912924
assert e.code == 0
913925

914926
# confirm request was made via the cli
915-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
916-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
927+
# CLI adds to arch-specific stream (defaults to amd64)
928+
build_arch = "amd64"
929+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
930+
assert conn.exists(arch_specific_stream)
931+
assert conn.xlen(arch_specific_stream) > 0
917932
running_platform = "fco-ThinkPad-T490"
918933

919-
build_runners_consumer_group_create(conn, running_platform, "0")
934+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
920935
datasink_conn = redis.StrictRedis(port=db_port)
921936
docker_client = docker.from_env()
922937
home = str(Path.home())
@@ -1308,6 +1323,7 @@ def test_self_contained_coordinator_duplicated_ts():
13081323

13091324
# generate 2 stream requests with the same timestamp
13101325
timestamp = int(datetime.datetime.now().timestamp())
1326+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
13111327
for _ in range(0, 2):
13121328
build_stream_fields, result = generate_benchmark_stream_request(
13131329
id,
@@ -1323,23 +1339,23 @@ def test_self_contained_coordinator_duplicated_ts():
13231339
build_stream_fields["mnt_point"] = ""
13241340
if result is True:
13251341
benchmark_stream_id = conn.xadd(
1326-
STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
1342+
arch_specific_stream, build_stream_fields
13271343
)
13281344
logging.info(
13291345
"sucessfully requested a new run {}. Stream id: {}".format(
13301346
build_stream_fields, benchmark_stream_id
13311347
)
13321348
)
13331349

1334-
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
1335-
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) == 2
1350+
assert conn.exists(arch_specific_stream)
1351+
assert conn.xlen(arch_specific_stream) == 2
13361352

13371353
running_platform = "fco-ThinkPad-T490"
13381354

13391355
# process the 2 stream requests
13401356
for _ in range(0, 2):
13411357

1342-
build_runners_consumer_group_create(conn, running_platform, "0")
1358+
build_runners_consumer_group_create(conn, running_platform, arch=build_arch, id="0")
13431359
datasink_conn = redis.StrictRedis(port=db_port)
13441360
docker_client = docker.from_env()
13451361
home = str(Path.home())

0 commit comments

Comments
 (0)