
Commit 4f855b7

Added dockerhub E2E triggering/tests
1 parent 2ca9e8a commit 4f855b7

File tree

11 files changed (+618 additions, -94 deletions)


Readme.md

Lines changed: 6 additions & 0 deletions
@@ -389,6 +389,12 @@ To run a specific test:
 $ tox -- utils/tests/test_runner.py
 ```
 
+To run a specific test with verbose logging:
+
+```sh
+$ tox -- -vv --log-cli-level=INFO utils/tests/test_runner.py
+```
+
 ## License
 
 redis-benchmarks-specification is distributed under the BSD3 license - see [LICENSE](LICENSE)

redis_benchmarks_specification/__builder__/builder.py

Lines changed: 104 additions & 42 deletions
@@ -429,48 +429,32 @@ def builder_process_stream(
                 build_duration = build_end_datetime - build_start_datetime
                 build_duration_secs = build_duration.total_seconds()
 
-                build_stream_fields = {
-                    "id": id,
-                    "git_hash": git_hash,
-                    "use_git_timestamp": str(use_git_timestamp),
-                    "build_image": build_image,
-                    "run_image": run_image,
-                    "compiler": compiler,
-                    "cpp_compiler": cpp_compiler,
-                    "os": build_os,
-                    "arch": build_arch,
-                    "build_vars": build_vars_str,
-                    "build_command": build_command,
-                    "metadata": json.dumps(build_config_metadata),
-                    "build_artifacts": ",".join(build_artifacts),
-                    "tests_regexp": tests_regexp,
-                    "tests_priority_upper_limit": tests_priority_upper_limit,
-                    "tests_priority_lower_limit": tests_priority_lower_limit,
-                    "tests_groups_regexp": tests_groups_regexp,
-                }
-                if pull_request is not None:
-                    build_stream_fields["pull_request"] = pull_request
-                if git_branch is not None:
-                    build_stream_fields["git_branch"] = git_branch
-                if git_version is not None:
-                    build_stream_fields["git_version"] = git_version
-                if git_timestamp_ms is not None:
-                    build_stream_fields["git_timestamp_ms"] = git_timestamp_ms
-                for artifact in build_artifacts:
-                    bin_key = "zipped:artifacts:{}:{}.zip".format(id, artifact)
-                    bin_artifact = open(
-                        "{}src/{}".format(redis_temporary_dir, artifact), "rb"
-                    ).read()
-                    bin_artifact_len = len(bytes(bin_artifact))
-                    assert bin_artifact_len > 0
-                    conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
-                    build_stream_fields[artifact] = bin_key
-                    build_stream_fields["{}_len_bytes".format(artifact)] = (
-                        bin_artifact_len
-                    )
-                result = True
-                if b"platform" in testDetails:
-                    build_stream_fields["platform"] = testDetails[b"platform"]
+                build_stream_fields, result = generate_benchmark_stream_request(
+                    id,
+                    conn,
+                    run_image,
+                    build_arch,
+                    testDetails,
+                    build_os,
+                    build_artifacts,
+                    build_command,
+                    build_config_metadata,
+                    build_image,
+                    build_vars_str,
+                    compiler,
+                    cpp_compiler,
+                    git_branch,
+                    git_hash,
+                    git_timestamp_ms,
+                    git_version,
+                    pull_request,
+                    redis_temporary_dir,
+                    tests_groups_regexp,
+                    tests_priority_lower_limit,
+                    tests_priority_upper_limit,
+                    tests_regexp,
+                    use_git_timestamp,
+                )
                 if result is True:
                     benchmark_stream_id = conn.xadd(
                         STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
@@ -551,6 +535,84 @@ def builder_process_stream(
     return previous_id, new_builds_count, build_stream_fields_arr
 
 
+def generate_benchmark_stream_request(
+    id,
+    conn,
+    run_image,
+    build_arch,
+    testDetails,
+    build_os,
+    build_artifacts=[],
+    build_command=None,
+    build_config_metadata=None,
+    build_image=None,
+    build_vars_str=None,
+    compiler=None,
+    cpp_compiler=None,
+    git_branch=None,
+    git_hash=None,
+    git_timestamp_ms=None,
+    git_version=None,
+    pull_request=None,
+    redis_temporary_dir=None,
+    tests_groups_regexp=".*",
+    tests_priority_lower_limit=0,
+    tests_priority_upper_limit=10000,
+    tests_regexp=".*",
+    use_git_timestamp=False,
+):
+    build_stream_fields = {
+        "id": id,
+        "use_git_timestamp": str(use_git_timestamp),
+        "run_image": run_image,
+        "os": build_os,
+        "arch": build_arch,
+        "build_artifacts": ",".join(build_artifacts),
+        "tests_regexp": tests_regexp,
+        "tests_priority_upper_limit": tests_priority_upper_limit,
+        "tests_priority_lower_limit": tests_priority_lower_limit,
+        "tests_groups_regexp": tests_groups_regexp,
+    }
+    if build_config_metadata is not None:
+        build_stream_fields["metadata"] = json.dumps(build_config_metadata)
+    if compiler is not None:
+        build_stream_fields["compiler"] = compiler
+    if cpp_compiler is not None:
+        build_stream_fields["cpp_compiler"] = cpp_compiler
+    if build_vars_str is not None:
+        build_stream_fields["build_vars"] = build_vars_str
+    if build_command is not None:
+        build_stream_fields["build_command"] = build_command
+    if build_image is not None:
+        build_stream_fields["build_image"] = build_image
+    else:
+        build_stream_fields["build_image"] = run_image
+    if git_hash is not None:
+        build_stream_fields["git_hash"] = git_hash
+    if pull_request is not None:
+        build_stream_fields["pull_request"] = pull_request
+    if git_branch is not None:
+        build_stream_fields["git_branch"] = git_branch
+    if git_version is not None:
+        build_stream_fields["git_version"] = git_version
+    if git_timestamp_ms is not None:
+        build_stream_fields["git_timestamp_ms"] = git_timestamp_ms
+    for artifact in build_artifacts:
+        bin_key = "zipped:artifacts:{}:{}.zip".format(id, artifact)
+        bin_artifact = open(
+            "{}src/{}".format(redis_temporary_dir, artifact), "rb"
+        ).read()
+        bin_artifact_len = len(bytes(bin_artifact))
+        assert bin_artifact_len > 0
+        conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
+        build_stream_fields[artifact] = bin_key
+        build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
+    result = True
+    if b"platform" in testDetails:
+        build_stream_fields["platform"] = testDetails[b"platform"]
+    return build_stream_fields, result
+
+
 def build_spec_image_prefetch(builders_folder, different_build_specs):
     logging.info("checking build spec requirements")
     already_checked_images = []
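
Note: in the extracted generate_benchmark_stream_request helper only id, conn, run_image, build_arch, testDetails, and build_os are required; every build-related field (compiler, build command, artifacts, git metadata) is now optional, which is what lets a benchmark run be requested for a prebuilt Docker Hub image with no compiled artifacts. A minimal sketch of such a call, assuming a locally reachable Redis and the module path shown in the file header above; the id, image, and OS values are illustrative only:

```python
# Illustrative sketch only: the id/image/os values below are hypothetical.
import redis

from redis_benchmarks_specification.__builder__ import builder

conn = redis.StrictRedis(host="localhost", port=6379)  # assumes a reachable Redis

# Request a run against a prebuilt Docker Hub image: no compiler, build command,
# or binary artifacts are involved, so only the required parameters are passed.
build_stream_fields, result = builder.generate_benchmark_stream_request(
    "dockerhub",            # hypothetical request id
    conn,
    "redis:7.2.4",          # run_image pulled straight from Docker Hub
    "amd64",                # build_arch
    {},                     # testDetails (no b"platform" entry in this sketch)
    "debian-gnu-linux-11",  # build_os
)

if result:
    # Same stream the builder itself writes to for compiled builds.
    conn.xadd(builder.STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields)
```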

redis_benchmarks_specification/__common__/runner.py

Lines changed: 4 additions & 1 deletion
@@ -149,7 +149,9 @@ def exporter_datasink_common(
     default_metrics=None,
     git_hash=None,
 ):
-    logging.info("Using datapoint_time_ms: {}".format(datapoint_time_ms))
+    logging.info(
+        f"Using datapoint_time_ms: {datapoint_time_ms}. git_has={git_hash}, git_branch={git_branch}, git_version={git_version}"
+    )
     timeseries_test_sucess_flow(
         datasink_push_results_redistimeseries,
         git_version,
@@ -171,6 +173,7 @@ def exporter_datasink_common(
         metadata,
         build_variant_name,
         running_platform,
+        None,
         git_hash,
     )
     logging.info("Collecting memory metrics")

redis_benchmarks_specification/__common__/timeseries.py

Lines changed: 84 additions & 15 deletions
@@ -283,10 +283,12 @@ def from_metric_kv_to_timeserie(
         tf_triggering_env,
         use_metric_context_path,
     )
+    logging.info(f"Adding timeserie named {ts_name} to time_series_dict.")
     time_series_dict[ts_name] = {
         "labels": timeserie_tags.copy(),
         "data": {datapoints_timestamp: metric_value},
     }
+
     original_ts_name = ts_name
     target_table_keyname = "target_tables:{triggering_env}:ci.benchmarks.redislabs/{break_by_key}/{break_by_str}/{tf_github_org}/{tf_github_repo}/{deployment_type}/{deployment_name}/{test_name}/{metric_name}".format(
         triggering_env=tf_triggering_env,
@@ -361,6 +363,8 @@ def common_timeseries_extraction(
     time_series_dict = {}
     target_tables = {}
     cleaned_metrics_arr = extract_results_table(metrics, results_dict)
+    total_metrics = len(cleaned_metrics_arr)
+    logging.info(f"Total of {total_metrics} cleaned metrics: {cleaned_metrics_arr}")
     for cleaned_metric in cleaned_metrics_arr:
 
         metric_jsonpath = cleaned_metric[0]
@@ -398,6 +402,48 @@ def common_timeseries_extraction(
     return time_series_dict, target_tables
 
 
+def extract_perhash_timeseries_from_results(
+    datapoints_timestamp: int,
+    metrics: list,
+    results_dict: dict,
+    git_hash: str,
+    tf_github_org: str,
+    tf_github_repo: str,
+    deployment_name: str,
+    deployment_type: str,
+    test_name: str,
+    tf_triggering_env: str,
+    metadata_tags={},
+    build_variant_name=None,
+    running_platform=None,
+    testcase_metric_context_paths=[],
+):
+    break_by_key = "hash"
+    break_by_str = "by.{}".format(break_by_key)
+    (
+        time_series_dict,
+        target_tables,
+    ) = common_timeseries_extraction(
+        break_by_key,
+        break_by_str,
+        datapoints_timestamp,
+        deployment_name,
+        deployment_type,
+        metrics,
+        git_hash,
+        results_dict,
+        test_name,
+        tf_github_org,
+        tf_github_repo,
+        tf_triggering_env,
+        metadata_tags,
+        build_variant_name,
+        running_platform,
+        testcase_metric_context_paths,
+    )
+    return True, time_series_dict, target_tables
+
+
 def extract_perversion_timeseries_from_results(
     datapoints_timestamp: int,
     metrics: list,
@@ -718,38 +764,46 @@ def common_exporter_logic(
         )
     )
     assert used_ts is not None
-
+    total_break_by_added = 0
     if (git_hash is not None) and (git_hash != ""):
-        break_by_key = "hash"
-        break_by_str = "by.{}".format(break_by_key)
+        # extract per-hash datapoints
         (
+            _,
             per_hash_time_series_dict,
-            hash_target_tables,
-        ) = common_timeseries_extraction(
-            break_by_key,
-            break_by_str,
-            datapoints_timestamp,
-            deployment_name,
-            deployment_type,
+            version_target_tables,
+        ) = extract_perhash_timeseries_from_results(
+            used_ts,
            metrics,
-            git_hash,
            results_dict,
-            test_name,
+            git_hash,
            tf_github_org,
            tf_github_repo,
+            deployment_name,
+            deployment_type,
+            test_name,
            tf_triggering_env,
            metadata_tags,
            build_variant_name,
            running_platform,
            testcase_metric_context_paths,
        )
-
+        total_break_by_added += 1
+    else:
+        logging.warning(
+            "there was no git hash information to push data brokedown by hash"
+        )
     if (
         artifact_version is not None
         and artifact_version != ""
         and artifact_version != "N/A"
     ):
         # extract per-version datapoints
+        total_hs_ts = len(per_hash_time_series_dict.keys())
+        logging.info(
+            f"Extending the by.hash {git_hash} timeseries ({total_hs_ts}) with version info {artifact_version}"
+        )
+        for hash_timeserie in per_hash_time_series_dict.values():
+            hash_timeserie["labels"]["version"] = artifact_version
         (
             _,
             per_version_time_series_dict,
@@ -770,7 +824,18 @@ def common_exporter_logic(
             running_platform,
             testcase_metric_context_paths,
         )
+        total_break_by_added += 1
+    else:
+        logging.warning(
+            "there was no git VERSION information to push data brokedown by VERSION"
+        )
     if tf_github_branch is not None and tf_github_branch != "":
+        total_hs_ts = len(per_hash_time_series_dict.keys())
+        logging.info(
+            f"Extending the by.hash {git_hash} timeseries ({total_hs_ts}) with branch info {tf_github_branch}"
+        )
+        for hash_timeserie in per_hash_time_series_dict.values():
+            hash_timeserie["labels"]["branch"] = tf_github_branch
         # extract per branch datapoints
         (
             _,
@@ -792,10 +857,14 @@ def common_exporter_logic(
             running_platform,
             testcase_metric_context_paths,
         )
+        total_break_by_added += 1
     else:
+        logging.warning(
+            "there was no git BRANCH information to push data brokedown by BRANCH"
+        )
+    if total_break_by_added == 0:
         logging.error(
-            "Requested to push data to RedisTimeSeries but "
-            'no exporter definition was found. Missing "exporter" config.'
+            "There was no BRANCH, HASH, or VERSION info to break this info by in timeseries"
         )
     return (
         per_version_time_series_dict,
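
Note: the reworked common_exporter_logic extracts the per-hash series once and reuses them as the basis for the version and branch breakdowns. When an artifact version or a branch is known, every per-hash timeseries first gets an extra version/branch label, the corresponding per-version/per-branch extraction runs, and total_break_by_added counts how many breakdowns were actually produced (an error is logged only when none were). A self-contained sketch of that label-extension step, with made-up series names, labels, and values:

```python
# Self-contained sketch of the label-extension step introduced above.
# Series names, labels, and values are made up for illustration.
per_hash_time_series_dict = {
    "ci.benchmarks.redislabs/by.hash/abc123/rps": {
        "labels": {"hash": "abc123", "metric": "rps"},
        "data": {1700000000000: 125000.0},
    },
    "ci.benchmarks.redislabs/by.hash/abc123/p50_latency_ms": {
        "labels": {"hash": "abc123", "metric": "p50_latency_ms"},
        "data": {1700000000000: 0.42},
    },
}

artifact_version = "7.2.4"
tf_github_branch = "unstable"
total_break_by_added = 1  # the per-hash breakdown above already counted

# Mirrors the two loops in common_exporter_logic: enrich every per-hash series
# before the per-version / per-branch extraction passes run.
if artifact_version and artifact_version != "N/A":
    for hash_timeserie in per_hash_time_series_dict.values():
        hash_timeserie["labels"]["version"] = artifact_version
    total_break_by_added += 1
if tf_github_branch:
    for hash_timeserie in per_hash_time_series_dict.values():
        hash_timeserie["labels"]["branch"] = tf_github_branch
    total_break_by_added += 1

print(total_break_by_added)  # -> 3; an error is logged only when this stays 0
```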

redis_benchmarks_specification/__runner__/runner.py

Lines changed: 1 addition & 1 deletion
@@ -211,7 +211,7 @@ def prepare_memtier_benchmark_parameters(
     server,
     password,
     local_benchmark_output_filename,
-    oss_cluster_api_enabled,
+    oss_cluster_api_enabled=False,
     tls_enabled=False,
     tls_skip_verify=False,
     tls_cert=None,

redis_benchmarks_specification/__self_contained_coordinator__/build_info.py

Lines changed: 5 additions & 3 deletions
@@ -10,11 +10,13 @@ def extract_build_info_from_streamdata(testDetails):
     arch = "amd64"
     use_git_timestamp = False
     git_timestamp_ms = None
-    metadata = None
+    metadata = {}
     build_variant_name = None
     fields = [fieldname.decode() for fieldname in testDetails.keys()]
-    logging.info("Fields on stream {}".format(fields))
-    git_hash = testDetails[b"git_hash"]
+    logging.info("Fields on stream {}".format(testDetails))
+    git_hash = None
+    if b"git_hash" in testDetails:
+        git_hash = testDetails[b"git_hash"].decode()
     if b"use_git_timestamp" in testDetails:
         use_git_timestamp = bool(testDetails[b"use_git_timestamp"].decode())
     if b"git_timestamp_ms" in testDetails:

redis_benchmarks_specification/__self_contained_coordinator__/docker.py

Lines changed: 2 additions & 2 deletions
@@ -18,9 +18,9 @@ def generate_standalone_redis_server_args(
         "no",
         "--port",
         "{}".format(port),
-        "--dir",
-        dbdir,
     ]
+    if dbdir != "":
+        command.extend(["--dbdir", dbdir])
     if configuration_parameters is not None:
         for parameter, parameter_value in configuration_parameters.items():
             if parameter not in added_params:
