13
13
from redis_benchmarks_specification .__builder__ .builder import (
14
14
generate_benchmark_stream_request ,
15
15
)
16
+ from redis_benchmarks_specification .__cli__ .args import spec_cli_args
17
+ from redis_benchmarks_specification .__cli__ .cli import (
18
+ trigger_tests_dockerhub_cli_command_logic ,
19
+ )
16
20
from redis_benchmarks_specification .__common__ .env import (
17
21
STREAM_KEYNAME_NEW_BUILD_EVENTS ,
18
22
)
@@ -359,6 +363,8 @@ def test_self_contained_coordinator_dockerhub_valkey():
359
363
id = "dockerhub"
360
364
redis_version = "7.2.6"
361
365
run_image = f"valkey/valkey:{ redis_version } -bookworm"
366
+ github_org = "valkey"
367
+ github_repo = "valkey"
362
368
build_arch = "amd64"
363
369
testDetails = {}
364
370
build_os = "test_build_os"
@@ -370,8 +376,6 @@ def test_self_contained_coordinator_dockerhub_valkey():
370
376
testDetails ,
371
377
build_os ,
372
378
)
373
- github_org = "valkey"
374
- github_repo = "valkey"
375
379
build_stream_fields ["github_repo" ] = github_repo
376
380
build_stream_fields ["github_org" ] = github_org
377
381
build_stream_fields ["server_name" ] = github_repo
@@ -386,9 +390,6 @@ def test_self_contained_coordinator_dockerhub_valkey():
386
390
)
387
391
)
388
392
389
- build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
390
- expected_datapoint_ts = None
391
-
392
393
assert conn .exists (STREAM_KEYNAME_NEW_BUILD_EVENTS )
393
394
assert conn .xlen (STREAM_KEYNAME_NEW_BUILD_EVENTS ) > 0
394
395
running_platform = "fco-ThinkPad-T490"
@@ -479,6 +480,135 @@ def test_self_contained_coordinator_dockerhub_valkey():
479
480
pass
480
481
481
482
483
def test_dockerhub_via_cli():
    """End-to-end check of the dockerhub trigger path going through the CLI.

    Uses the spec CLI argument parser plus
    ``trigger_tests_dockerhub_cli_command_logic`` to push a build event onto
    the new-build stream, then runs one blocking-read iteration of the
    self-contained coordinator against that stream and verifies the datasink
    contains both by-version and by-hash timeseries keys labelled with the
    requested valkey version.
    """
    if run_coordinator_tests_dockerhub():
        import argparse

        db_port = int(os.getenv("DATASINK_PORT", "6379"))
        conn = redis.StrictRedis(port=db_port)
        conn.ping()
        conn.flushall()
        redis_version = "7.2.6"
        run_image = f"valkey/valkey:{redis_version}-bookworm"
        github_org = "valkey"
        github_repo = "valkey"

        # Build the CLI invocation. The command logic is expected to finish
        # via SystemExit(0) after queueing the build event.
        parser = argparse.ArgumentParser(
            description="test",
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        )
        parser = spec_cli_args(parser)
        run_args = [
            "--server_name",
            "valkey",
            "--run_image",
            run_image,
            "--gh_org",
            github_org,
            "--gh_repo",
            github_repo,
            "--redis_port",
            "{}".format(db_port),
        ]
        args = parser.parse_args(
            args=run_args,
        )
        try:
            trigger_tests_dockerhub_cli_command_logic(args, "tool", "v0")
        except SystemExit as e:
            assert e.code == 0

        # confirm request was made via the cli
        assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
        assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
        running_platform = "fco-ThinkPad-T490"

        build_runners_consumer_group_create(conn, running_platform, "0")
        datasink_conn = redis.StrictRedis(port=db_port)
        docker_client = docker.from_env()
        home = str(Path.home())
        stream_id = ">"
        topologies_map = get_topologies(
            "./redis_benchmarks_specification/setups/topologies/topologies.yml"
        )
        # we use a benchmark spec with smaller CPU limit for client given github machines only contain 2 cores
        # and we need 1 core for DB and another for CLIENT
        testsuite_spec_files = [
            "./utils/tests/test_data/test-suites/test-memtier-dockerhub.yml"
        ]
        defaults_filename = "./utils/tests/test_data/test-suites/defaults.yml"
        (
            _,
            _,
            default_metrics,
            _,
            _,
            _,
        ) = get_defaults(defaults_filename)

        # Consume the event the CLI queued and run the contained test suite once.
        (
            result,
            stream_id,
            number_processed_streams,
            num_process_test_suites,
        ) = self_contained_coordinator_blocking_read(
            conn,
            True,
            docker_client,
            home,
            stream_id,
            datasink_conn,
            testsuite_spec_files,
            topologies_map,
            running_platform,
            False,
            [],
            "",
            0,
            6399,
            1,
            False,
            5,
            default_metrics,
            "amd64",
            None,
            0,
            10000,
            "unstable",
            "",
            True,
            False,
        )

        assert result is True
        assert number_processed_streams == 1
        assert num_process_test_suites == 1
        by_version_key = f"ci.benchmarks.redislabs/ci/{github_org}/{github_repo}/memtier_benchmark-1Mkeys-load-string-with-10B-values/by.version/{redis_version}/benchmark_end/oss-standalone/memory_maxmemory"
        assert datasink_conn.exists(by_version_key)
        rts = datasink_conn.ts()
        # check we have by version metrics
        assert "version" in rts.info(by_version_key).labels
        assert redis_version == rts.info(by_version_key).labels["version"]

        # get all keys
        all_keys = datasink_conn.keys("*")
        by_hash_keys = []
        for key in all_keys:
            if "/by.hash/" in key.decode():
                by_hash_keys.append(key)

        # ensure we have by hash keys
        assert len(by_hash_keys) > 0
        for hash_key in by_hash_keys:
            # ensure we have both version and hash info on the key
            assert "version" in rts.info(hash_key).labels
            assert "hash" in rts.info(hash_key).labels
            assert redis_version == rts.info(hash_key).labels["version"]
+
611
+
482
612
def test_self_contained_coordinator_skip_build_variant ():
483
613
try :
484
614
if run_coordinator_tests ():
0 commit comments