doc/usage.rst (4 additions, 0 deletions)
@@ -104,6 +104,7 @@ Usage::
                        [--append FILENAME] [--manifest MANIFEST]
                        [--timeout TIMEOUT] [-b BM_LIST]
                        [--inherit-environ VAR_LIST] [-p PYTHON]
+                       [--hook HOOK]
 
 options::
 
@@ -146,6 +147,9 @@ options::
                         Use the same number of loops as a previous run
                         (i.e., don't recalibrate). Should be a path to a
                         .json file from a previous run.
+  --hook HOOK
+                        Apply the given pyperf hook when running the
+                        benchmarks.
 
 show
 ----
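As a usage illustration (not part of the diff): the new flag is passed like any other run option, and because it is declared with action="append" in cli.py it may be repeated. The hook name below is hypothetical; valid names are whatever pyperf's hook registry exposes at runtime::

    # "pystats" is an assumed hook name, for illustration only
    python -m pyperformance run --hook pystats -b nbody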
pyperformance/cli.py (8 additions, 0 deletions)
@@ -19,6 +19,8 @@
     cmd_compare,
 )
 
+from pyperf import _hooks
+
 
 def comma_separated(values):
     values = [value.strip() for value in values.split(',')]
@@ -93,6 +95,12 @@ def parse_args():
                      help="Specify a timeout in seconds for a single "
                           "benchmark run (default: disabled)",
                      type=check_positive)
+    hook_names = list(_hooks.get_hook_names())
+    cmd.add_argument("--hook",
+                     action="append",
+                     choices=hook_names,
+                     metavar=f"{', '.join(x for x in hook_names if not x.startswith('_'))}",
+                     help="Apply the given pyperf hook(s) when running each benchmark")
     filter_opts(cmd)
 
     # show
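For reviewers, a minimal self-contained sketch of the argparse pattern added above. The hard-coded hook_names list is a stand-in for pyperf._hooks.get_hook_names(), whose real contents are only known at runtime; underscore-prefixed names stay valid choices but are hidden from the help text via metavar::

    import argparse

    # Stand-in for pyperf._hooks.get_hook_names() (assumed names).
    hook_names = ["pystats", "_internal_hook"]

    parser = argparse.ArgumentParser()
    parser.add_argument("--hook",
                        action="append",     # repeatable: each use appends to a list
                        choices=hook_names,  # validation still accepts every name
                        # metavar controls only what the help text displays
                        metavar=f"{', '.join(x for x in hook_names if not x.startswith('_'))}",
                        help="Apply the given pyperf hook(s) when running each benchmark")

    args = parser.parse_args(["--hook", "pystats", "--hook", "pystats"])
    print(args.hook)  # ['pystats', 'pystats']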
pyperformance/run.py (3 additions, 0 deletions)
@@ -242,5 +242,8 @@ def get_pyperf_opts(options):
         opts.append('--min-time=%s' % options.min_time)
     if options.timeout:
         opts.append('--timeout=%s' % options.timeout)
+    if options.hook:
+        for hook in options.hook:
+            opts.append('--hook=%s' % hook)
 
     return opts
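The new branch simply forwards each requested hook to the pyperf worker as its own option. A minimal sketch of that behaviour, with SimpleNamespace standing in for the parsed options object and illustrative hook names::

    from types import SimpleNamespace

    def get_hook_opts(options):
        # Mirrors the new branch in get_pyperf_opts(): one --hook=NAME
        # flag is emitted per requested hook, in the order given.
        opts = []
        if options.hook:
            for hook in options.hook:
                opts.append('--hook=%s' % hook)
        return opts

    print(get_hook_opts(SimpleNamespace(hook=["hook_a", "hook_b"])))
    # ['--hook=hook_a', '--hook=hook_b']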