Skip to content

Commit eb539c6

Browse files
authored
Support specifying hooks in pyperformance (#361)
* Support specifying hooks in pyperformance
* Adjust formatting
* Add a test
1 parent 6a42ffc commit eb539c6

File tree

4 files changed

+34
-0
lines changed

4 files changed

+34
-0
lines changed

doc/usage.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,7 @@ Usage::
104104
[--append FILENAME] [--manifest MANIFEST]
105105
[--timeout TIMEOUT] [-b BM_LIST]
106106
[--inherit-environ VAR_LIST] [-p PYTHON]
107+
[--hook HOOK]
107108

108109
options::
109110

@@ -146,6 +147,9 @@ options::
146147
Use the same number of loops as a previous run
147148
(i.e., don't recalibrate). Should be a path to a
148149
.json file from a previous run.
150+
--hook HOOK
151+
Apply the given pyperf hook when running the
152+
benchmarks.
149153

150154
show
151155
----

pyperformance/cli.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@
1919
cmd_compare,
2020
)
2121

22+
from pyperf import _hooks
23+
2224

2325
def comma_separated(values):
2426
values = [value.strip() for value in values.split(',')]
@@ -93,6 +95,12 @@ def parse_args():
9395
help="Specify a timeout in seconds for a single "
9496
"benchmark run (default: disabled)",
9597
type=check_positive)
98+
hook_names = list(_hooks.get_hook_names())
99+
cmd.add_argument("--hook",
100+
action="append",
101+
choices=hook_names,
102+
metavar=f"{', '.join(x for x in hook_names if not x.startswith('_'))}",
103+
help="Apply the given pyperf hook(s) when running each benchmark")
96104
filter_opts(cmd)
97105

98106
# show

pyperformance/run.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -242,5 +242,8 @@ def get_pyperf_opts(options):
242242
opts.append('--min-time=%s' % options.min_time)
243243
if options.timeout:
244244
opts.append('--timeout=%s' % options.timeout)
245+
if options.hook:
246+
for hook in options.hook:
247+
opts.append('--hook=%s' % hook)
245248

246249
return opts

pyperformance/tests/test_commands.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -173,6 +173,25 @@ def test_run_test_benchmarks(self):
173173
capture=None,
174174
)
175175

176+
def test_run_with_hook(self):
177+
# We expect this to fail, since pystats requires a special build of Python
178+
filename = self.resolve_tmp('bench-test-hook.json')
179+
180+
stdout = self.run_pyperformance(
181+
'run',
182+
'--manifest', os.path.join(tests.DATA_DIR, 'MANIFEST'),
183+
'-b', 'all',
184+
'-o', filename,
185+
'--hook', 'pystats',
186+
exitcode=1,
187+
capture='combined'
188+
)
189+
190+
self.assertIn(
191+
"Can not collect pystats because python was not built with --enable-pystats",
192+
stdout
193+
)
194+
176195
###################################
177196
# compile
178197

0 commit comments

Comments (0)