Skip to content

Commit 3d7c1cf

Browse files
Enabled uploading profile results to S3; made txt profile outputs main-thread related (#141)
* [add] Enabled uploading profile results to S3; made txt profile outputs main-thread related
1 parent 20824cf commit 3d7c1cf

File tree

6 files changed

+58
-14
lines changed

6 files changed

+58
-14
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "redisbench-admin"
3-
version = "0.2.10"
3+
version = "0.2.11"
44
description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
55
authors = ["filipecosta90 <[email protected]>"]
66
readme = "README.md"

redisbench_admin/profilers/perf.py

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -103,16 +103,20 @@ def generate_record_command(self, pid, output, frequency=None):
103103
cmd += ["--freq", "{}".format(frequency)]
104104
return cmd
105105

106-
def generate_report_command(self, input, dso=None):
106+
def generate_report_command(self, tid, input, dso, percentage_mode):
107107
cmd = [self.perf, "report"]
108108
if dso is not None:
109109
cmd += ["--dso", dso]
110110
cmd += [
111111
"--header",
112+
"--tid",
113+
"{}".format(tid),
112114
"--no-children",
113115
"--stdio",
114116
"-g",
115117
"none,1.0,caller,function",
118+
"--percentage",
119+
percentage_mode,
116120
"--input",
117121
input,
118122
]
@@ -293,12 +297,13 @@ def generate_outputs(self, use_case, **kwargs):
293297
if artifact_result is True:
294298
outputs["perf output"] = os.path.abspath(self.output)
295299

300+
tid = self.pid
296301
# generate perf report --stdio report
297302
logging.info("Generating perf report text outputs")
298303
perf_report_output = self.output + ".perf-report.top-cpu.txt"
299304

300305
artifact_result, perf_report_artifact = self.run_perf_report(
301-
perf_report_output, None
306+
tid, perf_report_output, None, "absolute"
302307
)
303308

304309
if artifact_result is True:
@@ -312,7 +317,7 @@ def generate_outputs(self, use_case, **kwargs):
312317
perf_report_output_dso = self.output + ".perf-report.top-cpu.dso.txt"
313318

314319
artifact_result, perf_report_artifact = self.run_perf_report(
315-
perf_report_output_dso, binary
320+
tid, perf_report_output_dso, binary, "relative"
316321
)
317322

318323
if artifact_result is True:
@@ -406,14 +411,10 @@ def generate_flame_graph(self, title="Flame Graph", subtitle="", filename=None):
406411
def get_collapsed_stacks(self):
407412
return self.collapsed_stacks
408413

409-
def run_perf_report(
410-
self,
411-
output,
412-
dso,
413-
):
414+
def run_perf_report(self, tid, output, dso, percentage_mode):
414415
status = False
415416
result_artifact = None
416-
args = self.generate_report_command(self.output, dso)
417+
args = self.generate_report_command(tid, self.output, dso, percentage_mode)
417418
logging.info("Running {} report with args {}".format(self.perf, args))
418419
try:
419420
stdout, _ = subprocess.Popen(

redisbench_admin/run_local/args.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
PROFILE_FREQ_DEFAULT,
1313
)
1414

15+
PUSH_S3 = bool(os.getenv("PUSH_S3", False))
1516
PROFILERS_ENABLED = os.getenv("PROFILE", 0)
1617
PROFILERS = os.getenv("PROFILERS", PROFILERS_DEFAULT)
1718
PROFILE_FREQ = os.getenv("PROFILE_FREQ", PROFILE_FREQ_DEFAULT)
@@ -38,6 +39,19 @@ def create_run_local_arguments(parser):
3839
help="path to the module file. "
3940
"You can use `--required-module` more than once",
4041
)
42+
parser.add_argument(
43+
"--s3_bucket_name",
44+
type=str,
45+
default="ci.benchmarks.redislabs",
46+
help="S3 bucket name.",
47+
)
48+
parser.add_argument(
49+
"--upload_results_s3",
50+
default=PUSH_S3,
51+
action="store_true",
52+
help="uploads the result files and configuration file to public "
53+
"'ci.benchmarks.redislabs' bucket. Proper credentials are required",
54+
)
4155
parser.add_argument("--profilers", type=str, default=PROFILERS)
4256
parser.add_argument(
4357
"--enable-profilers",

redisbench_admin/run_local/run_local.py

Lines changed: 26 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@
3838
)
3939
from redisbench_admin.run_remote.run_remote import (
4040
extract_module_semver_from_info_modules_cmd,
41+
get_test_s3_bucket_path,
4142
)
4243
from redisbench_admin.utils.local import (
4344
spin_up_local_redis,
@@ -49,7 +50,11 @@
4950
extract_git_vars,
5051
)
5152
from redisbench_admin.utils.results import post_process_benchmark_results
52-
from redisbench_admin.utils.utils import decompress_file, get_decompressed_filename
53+
from redisbench_admin.utils.utils import (
54+
decompress_file,
55+
get_decompressed_filename,
56+
upload_artifacts_to_s3,
57+
)
5358

5459

5560
def run_local_command_logic(args):
@@ -66,6 +71,7 @@ def run_local_command_logic(args):
6671
os.path.abspath(".")
6772
required_modules = args.required_module
6873
profilers_enabled = args.enable_profilers
74+
s3_bucket_name = args.s3_bucket_name
6975
profilers_map = {}
7076
profilers_list = []
7177
if profilers_enabled:
@@ -119,6 +125,7 @@ def run_local_command_logic(args):
119125

120126
for test_name, benchmark_config in benchmark_definitions.items():
121127
redis_process = None
128+
122129
# after we've spinned Redis, even on error we should always teardown
123130
# in case of some unexpected error we fail the test
124131
# noinspection PyBroadException
@@ -249,6 +256,13 @@ def run_local_command_logic(args):
249256
+ "If that is not possible please change the profile frequency to an higher value."
250257
+ "via the env variable PROFILE_FREQ. NOTICE THAT THIS INCREASES OVERHEAD!!!"
251258
)
259+
s3_bucket_path = get_test_s3_bucket_path(
260+
s3_bucket_name,
261+
test_name,
262+
github_org_name,
263+
github_repo_name,
264+
"profiles",
265+
)
252266
for profiler_name, profiler_obj in profilers_map.items():
253267
# Collect and fold stacks
254268
logging.info(
@@ -278,6 +292,7 @@ def run_local_command_logic(args):
278292
len(profile_res_artifacts_map.values()),
279293
)
280294
)
295+
artifact_paths = []
281296
for (
282297
artifact_name,
283298
profile_artifact,
@@ -290,11 +305,21 @@ def run_local_command_logic(args):
290305
profile_artifact,
291306
]
292307
)
308+
artifact_paths.append(profile_artifact)
293309
logging.info(
294310
"artifact {}: {}.".format(
295311
artifact_name, profile_artifact
296312
)
297313
)
314+
if args.upload_results_s3:
315+
logging.info(
316+
"Uploading results to s3. s3 bucket name: {}. s3 bucket path: {}".format(
317+
s3_bucket_name, s3_bucket_path
318+
)
319+
)
320+
upload_artifacts_to_s3(
321+
artifact_paths, s3_bucket_name, s3_bucket_path
322+
)
298323

299324
post_process_benchmark_results(
300325
benchmark_tool,

redisbench_admin/run_remote/run_remote.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -864,11 +864,14 @@ def extract_tsbs_extra_links(benchmark_config, benchmark_tool):
864864
return queries_file_link, remote_tool_link, tool_link
865865

866866

867-
def get_test_s3_bucket_path(s3_bucket_name, test_name, tf_github_org, tf_github_repo):
868-
s3_bucket_path = "{github_org}/{github_repo}/results/{test_name}/".format(
867+
def get_test_s3_bucket_path(
868+
s3_bucket_name, test_name, tf_github_org, tf_github_repo, folder="results"
869+
):
870+
s3_bucket_path = "{github_org}/{github_repo}/{folder}/{test_name}/".format(
869871
github_org=tf_github_org,
870872
github_repo=tf_github_repo,
871873
test_name=test_name,
874+
folder=folder,
872875
)
873876
return s3_bucket_path
874877

redisbench_admin/utils/utils.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,14 +25,15 @@
2525

2626

2727
def upload_artifacts_to_s3(artifacts, s3_bucket_name, s3_bucket_path):
28-
print("-- uploading results to s3 -- ")
28+
logging.info("-- uploading results to s3 -- ")
2929
s3 = boto3.resource("s3")
3030
bucket = s3.Bucket(s3_bucket_name)
3131
progress = tqdm(unit="files", total=len(artifacts))
3232
for artifact in artifacts:
3333
object_key = "{bucket_path}{filename}".format(
3434
bucket_path=s3_bucket_path, filename=artifact
3535
)
36+
3637
bucket.upload_file(artifact, object_key)
3738
object_acl = s3.ObjectAcl(s3_bucket_name, object_key)
3839
object_acl.put(ACL="public-read")

0 commit comments

Comments
 (0)