@@ -2,6 +2,7 @@
 from contextlib import contextmanager
 import functools
 import json
+import logging
 import os
 import tempfile
 import time
@@ -29,67 +30,77 @@ def parse_args(args, cmd):
             return self.parse_args(cmd)
 
         def measurement_run(repo, path):
-            compression = "--compression=none"
-            # measure create perf (without files cache to always have it chunking)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_create(
-                    parse_args(
-                        args,
-                        [
-                            f"--repo={repo}",
-                            "create",
-                            compression,
-                            "--files-cache=disabled",
-                            "borg-benchmark-crud1",
-                            path,
-                        ],
+            # Suppress "Done. Run borg compact..." warnings from internal do_delete() calls;
+            # they clutter benchmark output and are irrelevant here (repo is temporary).
+            archiver_logger = logging.getLogger("borg.archiver")
+            original_level = archiver_logger.level
+            archiver_logger.setLevel(logging.ERROR)
+            try:
+                compression = "--compression=none"
+                # measure create perf (without files cache to always have it chunking)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_create(
+                        parse_args(
+                            args,
+                            [
+                                f"--repo={repo}",
+                                "create",
+                                compression,
+                                "--files-cache=disabled",
+                                "borg-benchmark-crud1",
+                                path,
+                            ],
+                        )
+                    )
+                )
+                t_end = time.monotonic()
+                dt_create = t_end - t_start
+                assert rc == 0
+                # now build files cache
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                    )
+                )
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure a no-change update (archive1 is still present)
+                t_start = time.monotonic()
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
                     )
                 )
-            )
-            t_end = time.monotonic()
-            dt_create = t_end - t_start
-            assert rc == 0
-            # now build files cache
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                t_end = time.monotonic()
+                dt_update = t_end - t_start
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure extraction (dry-run: without writing result to disk)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_extract(
+                        parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"])
+                    )
                 )
-            )
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure a no-change update (archive1 is still present)
-            t_start = time.monotonic()
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                t_end = time.monotonic()
+                dt_extract = t_end - t_start
+                assert rc == 0
+                # measure archive deletion (of LAST present archive with the data)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
                 )
-            )
-            t_end = time.monotonic()
-            dt_update = t_end - t_start
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure extraction (dry-run: without writing result to disk)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_extract(parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
-            )
-            t_end = time.monotonic()
-            dt_extract = t_end - t_start
-            assert rc == 0
-            # measure archive deletion (of LAST present archive with the data)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
-            )
-            t_end = time.monotonic()
-            dt_delete = t_end - t_start
-            assert rc == 0
-            return dt_create, dt_update, dt_extract, dt_delete
+                t_end = time.monotonic()
+                dt_delete = t_end - t_start
+                assert rc == 0
+                return dt_create, dt_update, dt_extract, dt_delete
+            finally:
+                archiver_logger.setLevel(original_level)
 
         @contextmanager
         def test_files(path, count, size, random):
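The heart of this change is the save/restore of the logger level around the benchmark body. Since the module already imports contextmanager, the same pattern could be factored into a small reusable context manager. A minimal sketch, assuming nothing beyond the stdlib; the name suppressed_log_level is illustrative, not part of this patch:

    from contextlib import contextmanager
    import logging

    @contextmanager
    def suppressed_log_level(logger_name, level=logging.ERROR):
        # Temporarily raise the threshold of the named logger; the previous
        # level is restored even if the body raises.
        logger = logging.getLogger(logger_name)
        original_level = logger.level
        logger.setLevel(level)
        try:
            yield logger
        finally:
            logger.setLevel(original_level)

    # usage, mirroring measurement_run() above:
    # with suppressed_log_level("borg.archiver"):
    #     ...timed create/extract/delete calls...

Because child loggers default to NOTSET, raising the level on "borg.archiver" also silences WARNING-level records from its descendants for the duration of the block.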
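Each measured phase wraps its call in the same t_start/t_end bookkeeping. If that repetition ever grows, the monotonic-clock timing could be factored out too; a sketch under the same stdlib-only assumption (the timed helper below is hypothetical, not from this patch):

    from contextlib import contextmanager
    import time

    @contextmanager
    def timed(results, key):
        # Store the elapsed monotonic time of the enclosed block in
        # results[key], even if the block raises.
        t_start = time.monotonic()
        try:
            yield
        finally:
            results[key] = time.monotonic() - t_start

    # usage sketch:
    # durations = {}
    # with timed(durations, "create"):
    #     rc = get_reset_ec(self.do_create(...))

time.monotonic() is the right clock for these measurements: unlike time.time(), it cannot jump backwards if the system clock is adjusted mid-run.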