Skip to content

Commit 3f6282c

Browse files
Merge pull request #9366 from mr-raj12/fix-benchmark-compact-warnings
benchmark crud: suppress compact warnings during benchmark runs, fixes #9365
2 parents 1c0bf36 + c7261ae commit 3f6282c

File tree

1 file changed

+68
-57
lines changed

1 file changed

+68
-57
lines changed

src/borg/archiver/benchmark_cmd.py

Lines changed: 68 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from contextlib import contextmanager
33
import functools
44
import json
5+
import logging
56
import os
67
import tempfile
78
import time
@@ -29,67 +30,77 @@ def parse_args(args, cmd):
2930
return self.parse_args(cmd)
3031

3132
def measurement_run(repo, path):
    """Run the crud benchmark (create/update/extract/delete) against *repo*.

    *path* is the directory with the test files to back up.  Returns a
    4-tuple ``(dt_create, dt_update, dt_extract, dt_delete)`` of wall-clock
    durations in seconds.
    """
    # Silence "Done. Run borg compact ..." warnings emitted by the internal
    # do_delete() calls — the benchmark repo is temporary, so the advice is
    # irrelevant and would only clutter the benchmark output.
    log = logging.getLogger("borg.archiver")
    saved_level = log.level
    log.setLevel(logging.ERROR)

    def run_timed(method, argv):
        # Execute one archiver sub-command; return (exit_code, duration).
        t0 = time.monotonic()
        ec = get_reset_ec(method(parse_args(args, argv)))
        return ec, time.monotonic() - t0

    try:
        compression = "--compression=none"
        # measure create perf (without files cache to always have it chunking)
        rc, dt_create = run_timed(
            self.do_create,
            [
                f"--repo={repo}",
                "create",
                compression,
                "--files-cache=disabled",
                "borg-benchmark-crud1",
                path,
            ],
        )
        assert rc == 0
        # now build files cache
        rc1, _ = run_timed(
            self.do_create, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path]
        )
        rc2, _ = run_timed(self.do_delete, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"])
        assert rc1 == rc2 == 0
        # measure a no-change update (archive1 is still present)
        rc1, dt_update = run_timed(
            self.do_create, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path]
        )
        rc2, _ = run_timed(self.do_delete, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"])
        assert rc1 == rc2 == 0
        # measure extraction (dry-run: without writing result to disk)
        rc, dt_extract = run_timed(
            self.do_extract, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]
        )
        assert rc == 0
        # measure archive deletion (of LAST present archive with the data)
        rc, dt_delete = run_timed(self.do_delete, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"])
        assert rc == 0
        return dt_create, dt_update, dt_extract, dt_delete
    finally:
        log.setLevel(saved_level)
93104

94105
@contextmanager
95106
def test_files(path, count, size, random):

0 commit comments

Comments (0)