Skip to content

Commit 9a6a6f7

Browse files
[fix] Fixed metric extraction exceptions on missing metrics. Made RTS results pushing more resilient (#109)
1 parent 6a224e8 commit 9a6a6f7

File tree

9 files changed

+302
-113
lines changed

9 files changed

+302
-113
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "redisbench-admin"
3-
version = "0.1.76"
3+
version = "0.1.77"
44
description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
55
authors = ["filipecosta90 <[email protected]>"]
66
readme = "README.md"

redisbench_admin/run/common.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -145,6 +145,9 @@ def common_exporter_logic(
145145
tf_triggering_env,
146146
artifact_version="N/A",
147147
):
148+
per_version_time_series_dict = None
149+
per_branch_time_series_dict = None
150+
148151
if exporter_timemetric_path is not None and len(metrics) > 0:
149152
# extract timestamp
150153
datapoints_timestamp = parse_exporter_timemetric(
@@ -171,7 +174,7 @@ def common_exporter_logic(
171174
push_data_to_redistimeseries(rts, per_version_time_series_dict)
172175
if tf_github_branch is not None and tf_github_branch != "":
173176
# extract per branch datapoints
174-
ok, branch_time_series_dict = extract_perbranch_timeseries_from_results(
177+
ok, per_branch_time_series_dict = extract_perbranch_timeseries_from_results(
175178
datapoints_timestamp,
176179
metrics,
177180
results_dict,
@@ -183,7 +186,7 @@ def common_exporter_logic(
183186
tf_triggering_env,
184187
)
185188
# push per-branch data
186-
push_data_to_redistimeseries(rts, branch_time_series_dict)
189+
push_data_to_redistimeseries(rts, per_branch_time_series_dict)
187190
else:
188191
logging.warning(
189192
"Requested to push data to RedisTimeSeries but no git"
@@ -196,6 +199,7 @@ def common_exporter_logic(
196199
"Requested to push data to RedisTimeSeries but "
197200
'no exporter definition was found. Missing "exporter" config.'
198201
)
202+
return per_version_time_series_dict, per_branch_time_series_dict
199203

200204

201205
def get_start_time_vars(start_time=None):

redisbench_admin/run/tsbs_run_queries_redistimeseries/tsbs_run_queries_redistimeseries.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -28,17 +28,17 @@ def prepare_tsbs_benchmark_command(
2828
command_arr.extend(
2929
["--host", "{}:{}".format(server_private_ip, server_plaintext_port)]
3030
)
31-
32-
for k in benchmark_config["parameters"]:
33-
if "file" in k:
34-
input_file = k["file"]
35-
input_file = check_if_needs_remote_fetch(
36-
input_file, "/tmp", None, remote_queries_file, is_remote
37-
)
38-
command_arr.extend(["--file", input_file])
39-
else:
40-
for kk in k.keys():
41-
command_arr.extend(["--{}".format(kk), str(k[kk])])
31+
if "parameters" in benchmark_config:
32+
for k in benchmark_config["parameters"]:
33+
if "file" in k:
34+
input_file = k["file"]
35+
input_file = check_if_needs_remote_fetch(
36+
input_file, "/tmp", None, remote_queries_file, is_remote
37+
)
38+
command_arr.extend(["--file", input_file])
39+
else:
40+
for kk in k.keys():
41+
command_arr.extend(["--{}".format(kk), str(k[kk])])
4242

4343
command_arr.extend(["--results-file", result_file])
4444

redisbench_admin/run_remote/run_remote.py

Lines changed: 42 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import sys
55
import traceback
66

7+
import redis
78
from python_terraform import Terraform
89
from redistimeseries.client import Client
910

@@ -525,31 +526,47 @@ def run_remote_command_logic(args):
525526
tf_github_repo,
526527
tf_triggering_env,
527528
)
528-
rts.redis.sadd(testcases_setname, test_name)
529-
rts.incrby(
530-
tsname_project_total_success,
531-
1,
532-
timestamp=start_time_ms,
533-
labels=get_project_ts_tags(
534-
tf_github_org,
535-
tf_github_repo,
536-
deployment_type,
537-
tf_triggering_env,
538-
),
539-
)
529+
try:
530+
rts.redis.sadd(testcases_setname, test_name)
531+
rts.incrby(
532+
tsname_project_total_success,
533+
1,
534+
timestamp=start_time_ms,
535+
labels=get_project_ts_tags(
536+
tf_github_org,
537+
tf_github_repo,
538+
deployment_type,
539+
tf_triggering_env,
540+
),
541+
)
542+
except redis.exceptions.ResponseError as e:
543+
logging.warning(
544+
"Error while updating secondary data structures {}. ".format(
545+
e.__str__()
546+
)
547+
)
548+
pass
540549
except:
541550
if args.push_results_redistimeseries:
542-
rts.incrby(
543-
tsname_project_total_failures,
544-
1,
545-
timestamp=start_time_ms,
546-
labels=get_project_ts_tags(
547-
tf_github_org,
548-
tf_github_repo,
549-
deployment_type,
550-
tf_triggering_env,
551-
),
552-
)
551+
try:
552+
rts.incrby(
553+
tsname_project_total_failures,
554+
1,
555+
timestamp=start_time_ms,
556+
labels=get_project_ts_tags(
557+
tf_github_org,
558+
tf_github_repo,
559+
deployment_type,
560+
tf_triggering_env,
561+
),
562+
)
563+
except redis.exceptions.ResponseError as e:
564+
logging.warning(
565+
"Error while updating secondary data structures {}. ".format(
566+
e.__str__()
567+
)
568+
)
569+
pass
553570
return_code |= 1
554571
logging.critical(
555572
"Some unexpected exception was caught "
@@ -596,7 +613,7 @@ def redistimeseries_results_logic(
596613
exporter_timemetric_path, metrics = merge_default_and_config_metrics(
597614
benchmark_config, default_metrics, exporter_timemetric_path
598615
)
599-
common_exporter_logic(
616+
per_version_time_series_dict, per_branch_time_series_dict = common_exporter_logic(
600617
deployment_type,
601618
exporter_timemetric_path,
602619
metrics,
@@ -609,6 +626,7 @@ def redistimeseries_results_logic(
609626
tf_triggering_env,
610627
artifact_version,
611628
)
629+
return per_version_time_series_dict, per_branch_time_series_dict
612630

613631

614632
def merge_default_and_config_metrics(

redisbench_admin/utils/remote.py

Lines changed: 86 additions & 75 deletions
Original file line numberDiff line numberDiff line change
@@ -414,36 +414,37 @@ def fetch_remote_setup_from_config(
414414
def push_data_to_redistimeseries(rts: client, branch_time_series_dict: dict):
415415
datapoint_errors = 0
416416
datapoint_inserts = 0
417-
for timeseries_name, time_series in branch_time_series_dict.items():
418-
try:
419-
logging.info(
420-
"Creating timeseries named {} with labels {}".format(
421-
timeseries_name, time_series["labels"]
422-
)
423-
)
424-
rts.create(timeseries_name, labels=time_series["labels"])
425-
except redis.exceptions.ResponseError:
426-
logging.warning(
427-
"Timeseries named {} already exists".format(timeseries_name)
428-
)
429-
pass
430-
for timestamp, value in time_series["data"].items():
417+
if rts is not None:
418+
for timeseries_name, time_series in branch_time_series_dict.items():
431419
try:
432-
rts.add(
433-
timeseries_name,
434-
timestamp,
435-
value,
436-
duplicate_policy="last",
420+
logging.info(
421+
"Creating timeseries named {} with labels {}".format(
422+
timeseries_name, time_series["labels"]
423+
)
437424
)
438-
datapoint_inserts += 1
425+
rts.create(timeseries_name, labels=time_series["labels"])
439426
except redis.exceptions.ResponseError:
440427
logging.warning(
441-
"Error while inserting datapoint ({} : {}) in timeseries named {}. ".format(
442-
timestamp, value, timeseries_name
443-
)
428+
"Timeseries named {} already exists".format(timeseries_name)
444429
)
445-
datapoint_errors += 1
446430
pass
431+
for timestamp, value in time_series["data"].items():
432+
try:
433+
rts.add(
434+
timeseries_name,
435+
timestamp,
436+
value,
437+
duplicate_policy="last",
438+
)
439+
datapoint_inserts += 1
440+
except redis.exceptions.ResponseError:
441+
logging.warning(
442+
"Error while inserting datapoint ({} : {}) in timeseries named {}. ".format(
443+
timestamp, value, timeseries_name
444+
)
445+
)
446+
datapoint_errors += 1
447+
pass
447448
return datapoint_errors, datapoint_inserts
448449

449450

@@ -470,34 +471,40 @@ def extract_perversion_timeseries_from_results(
470471
for jsonpath in metrics:
471472
jsonpath_expr = parse(jsonpath)
472473
metric_name = jsonpath[2:]
473-
metric_value = float(jsonpath_expr.find(results_dict)[0].value)
474-
# prepare tags
475-
# branch tags
476-
version_tags = get_project_ts_tags(
477-
tf_github_org, tf_github_repo, deployment_type, tf_triggering_env
478-
)
479-
version_tags["version"] = project_version
480-
version_tags["test_name"] = str(test_name)
481-
version_tags["metric"] = str(metric_name)
482-
483-
ts_name = (
484-
"ci.benchmarks.redislabs/by.version/"
485-
"{triggering_env}/{github_org}/{github_repo}/"
486-
"{test_name}/{deployment_type}/{version}/{metric}".format(
487-
version=project_version,
488-
github_org=tf_github_org,
489-
github_repo=tf_github_repo,
490-
deployment_type=deployment_type,
491-
test_name=test_name,
492-
triggering_env=tf_triggering_env,
493-
metric=metric_name,
474+
find_res = jsonpath_expr.find(results_dict)
475+
if find_res is not None and len(find_res) > 0:
476+
metric_value = float(find_res[0].value)
477+
# prepare tags
478+
# branch tags
479+
version_tags = get_project_ts_tags(
480+
tf_github_org, tf_github_repo, deployment_type, tf_triggering_env
481+
)
482+
version_tags["version"] = project_version
483+
version_tags["test_name"] = str(test_name)
484+
version_tags["metric"] = str(metric_name)
485+
486+
ts_name = (
487+
"ci.benchmarks.redislabs/by.version/"
488+
"{triggering_env}/{github_org}/{github_repo}/"
489+
"{test_name}/{deployment_type}/{version}/{metric}".format(
490+
version=project_version,
491+
github_org=tf_github_org,
492+
github_repo=tf_github_repo,
493+
deployment_type=deployment_type,
494+
test_name=test_name,
495+
triggering_env=tf_triggering_env,
496+
metric=metric_name,
497+
)
494498
)
495-
)
496499

497-
branch_time_series_dict[ts_name] = {
498-
"labels": version_tags.copy(),
499-
"data": {datapoints_timestamp: metric_value},
500-
}
500+
branch_time_series_dict[ts_name] = {
501+
"labels": version_tags.copy(),
502+
"data": {datapoints_timestamp: metric_value},
503+
}
504+
else:
505+
logging.warning(
506+
"Unable to find metric path {} in {}".format(jsonpath, results_dict)
507+
)
501508
return True, branch_time_series_dict
502509

503510

@@ -532,34 +539,38 @@ def extract_perbranch_timeseries_from_results(
532539
for jsonpath in metrics:
533540
jsonpath_expr = parse(jsonpath)
534541
metric_name = jsonpath[2:]
535-
metric_value = float(jsonpath_expr.find(results_dict)[0].value)
536-
# prepare tags
537-
# branch tags
542+
find_res = jsonpath_expr.find(results_dict)
543+
if find_res is not None and len(find_res) > 0:
544+
metric_value = float(find_res[0].value)
538545

539-
branch_tags = get_project_ts_tags(
540-
tf_github_org, tf_github_repo, deployment_type, tf_triggering_env
541-
)
542-
branch_tags["branch"] = str(tf_github_branch)
543-
branch_tags["test_name"] = str(test_name)
544-
branch_tags["metric"] = str(metric_name)
545-
ts_name = (
546-
"ci.benchmarks.redislabs/by.branch/"
547-
"{triggering_env}/{github_org}/{github_repo}/"
548-
"{test_name}/{deployment_type}/{branch}/{metric}".format(
549-
branch=str(tf_github_branch),
550-
github_org=tf_github_org,
551-
github_repo=tf_github_repo,
552-
deployment_type=deployment_type,
553-
test_name=test_name,
554-
triggering_env=tf_triggering_env,
555-
metric=metric_name,
546+
branch_tags = get_project_ts_tags(
547+
tf_github_org, tf_github_repo, deployment_type, tf_triggering_env
548+
)
549+
branch_tags["branch"] = str(tf_github_branch)
550+
branch_tags["test_name"] = str(test_name)
551+
branch_tags["metric"] = str(metric_name)
552+
ts_name = (
553+
"ci.benchmarks.redislabs/by.branch/"
554+
"{triggering_env}/{github_org}/{github_repo}/"
555+
"{test_name}/{deployment_type}/{branch}/{metric}".format(
556+
branch=str(tf_github_branch),
557+
github_org=tf_github_org,
558+
github_repo=tf_github_repo,
559+
deployment_type=deployment_type,
560+
test_name=test_name,
561+
triggering_env=tf_triggering_env,
562+
metric=metric_name,
563+
)
556564
)
557-
)
558565

559-
branch_time_series_dict[ts_name] = {
560-
"labels": branch_tags.copy(),
561-
"data": {datapoints_timestamp: metric_value},
562-
}
566+
branch_time_series_dict[ts_name] = {
567+
"labels": branch_tags.copy(),
568+
"data": {datapoints_timestamp: metric_value},
569+
}
570+
else:
571+
logging.warning(
572+
"Unable to find metric path {} in {}".format(jsonpath, results_dict)
573+
)
563574
return True, branch_time_series_dict
564575

565576

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
name: "tsbs-devops-ingestion-scale100-4days"
2+
remote:
3+
- type: oss-standalone
4+
- setup: redistimeseries-m5
5+
clientconfig:
6+
- tool: tsbs_load_redistimeseries
7+
- parameters:
8+
- workers: 64
9+
- reporting-period: 1s
10+
- file: "https://s3.amazonaws.com/benchmarks.redislabs/redistimeseries/tsbs/datasets/devops/scale100/data_redistimeseries_cpu-only_100.dat"
11+
exporter:
12+
redistimeseries:
13+
timemetric: "$.StartTime"
14+
metrics:
15+
- "$.Totals.metricRate"
16+
- "$.Totals.rowRate"

0 commit comments

Comments (0)