Skip to content

Commit 47812b7

Browse files
committed
feat: add performance tests to benchmark Robocop on release
1 parent 3e1df7d commit 47812b7

File tree

12 files changed

+1844
-1979
lines changed

12 files changed

+1844
-1979
lines changed

.github/workflows/docs-check.yml

Lines changed: 0 additions & 18 deletions
This file was deleted.
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
name: Release Check

on:
  pull_request:
    types: [opened, synchronize, labeled]
  workflow_dispatch:

jobs:
  build-docs:
    runs-on: ubuntu-latest
    # Only run on release PRs (release-please labels them with 'autorelease: pending')
    # NOTE(review): this gate is always false for workflow_dispatch runs
    # (github.event.pull_request is empty there) — confirm that is intended.
    if: "contains(github.event.pull_request.labels.*.name, 'autorelease: pending')"
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v4
      - name: Install dependencies
        run: uv sync --group doc
      - name: Build docs
        run: uv run mkdocs build --clean

  performance-tests:
    runs-on: ubuntu-latest
    # Fixed: the release-label gate was commented out "temporarily for testing",
    # which made the (slow) benchmarks run on every PR. Re-enabled, with an
    # escape hatch so manual workflow_dispatch runs still execute the job.
    if: "github.event_name == 'workflow_dispatch' || contains(github.event.pull_request.labels.*.name, 'autorelease: pending')"
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v4

      - name: Set up Python 3.13
        uses: actions/setup-python@v5
        with:
          python-version: 3.13

      - name: Install dependencies
        run: uv sync --dev

      # Collect the four most recent release tags; noxfile.py reads this
      # comma-separated list from the ROBOCOP_VERSIONS environment variable.
      - name: Resolve app versions
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo "ROBOCOP_VERSIONS=$(gh release list --limit 4 --json tagName --jq '.[].tagName' | tr '\n' ',')" >> $GITHUB_ENV

      - name: Run performance tests
        run: uv run nox -s performance > performance.log 2>&1

      - name: Merge report and prepare for publishing
        run: uv run tests/performance/merge_reports.py

      - name: Publish to job summary
        run: cat perf_report.md >> "$GITHUB_STEP_SUMMARY"

      - name: Publish to PR
        if: github.event_name == 'pull_request'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh pr comment \
            "${{ github.event.pull_request.number }}" \
            --body-file perf_report.md

      - name: Upload log artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: performance.log
          path: performance.log

.github/workflows/tests.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ on:
44
push:
55
branches: [main]
66
pull_request:
7-
branches: [ main ]
7+
branches: [ temp ]
88

99
jobs:
1010
build:

noxfile.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,20 @@
44
> uv run nox -s docs
55
"""
66

7+
import os
8+
79
import nox
810

911
nox.options.default_venv_backend = "uv"
1012

1113
PYTHON_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
1214

1315
ROBOT_VERSIONS = ["robotframework==4.*", "robotframework==5.*", "robotframework==6.*", "robotframework==7.*"]
16+
# Robocop versions to benchmark. CI sets ROBOCOP_VERSIONS to a comma-separated
# list of release tags (trailing separators produce empty items, which are
# dropped); the locally checked-out code is always benchmarked as "local".
ROBOCOP_VERSIONS = [tag for tag in os.environ.get("ROBOCOP_VERSIONS", "").split(",") if tag] + ["local"]
1421

1522

1623
@nox.session(python=PYTHON_VERSIONS) # , reuse_venv=False
@@ -52,3 +59,28 @@ def docs(session):
5259
# session.run("sphinx-build", "-a", "-E", "-b", "html", "docs", "docs/_build/")
5360
command = ["sphinx-build", "-a", "-E", "--verbose", "-b", "html", "docs/source", "docs/_build/"]
5461
session.run(*command)
62+
63+
64+
@nox.session(python=PYTHON_VERSIONS[-2])
@nox.parametrize("robocop_version", ROBOCOP_VERSIONS)
def performance(session: nox.Session, robocop_version: str) -> None:
    """Run the performance benchmarks against a single Robocop version.

    ``robocop_version`` is either a release tag (e.g. ``v6.1.0``) or the
    sentinel ``"local"``, which benchmarks the checked-out working copy.
    """
    # Release tags from `gh release list` carry a leading "v"; the PyPI
    # version specifier must not.
    robocop_version = robocop_version.removeprefix("v")
    if not robocop_version:
        # Guard against an empty entry (e.g. the tag was just "v").
        return
    if robocop_version == "local":
        # Install the project itself (and dev deps) into this session's venv.
        session.run_install(
            "uv",
            "sync",
            f"--python={session.virtualenv.location}",
            env={"UV_PROJECT_ENVIRONMENT": session.virtualenv.location},
        )
    else:
        # NOTE(review): `uv add` edits pyproject.toml/uv.lock in the working
        # tree — presumably acceptable in CI; confirm this session is not run
        # on a developer checkout.
        session.run(
            "uv",
            "add",
            f"robotframework-robocop=={robocop_version}",
            f"--python={session.virtualenv.location}",
            "--dev",
            env={"UV_PROJECT_ENVIRONMENT": session.virtualenv.location},
        )
    # Run the benchmark script as a module so `tests.performance` imports resolve.
    session.run("python", "-m", "tests.performance.generate_reports", external=True, silent=False)

pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,8 @@ dev = [
8080
"pytest-xdist>=3.6.1",
8181
"ruff==0.14.8",
8282
"pysonar",
83+
"nox>=2025.11.12",
84+
"packaging>=25.0",
8385
]
8486
doc = [
8587
"mkdocs",

tests/performance/__init__.py

Whitespace-only changes.

tests/performance/generate_reports.py

Lines changed: 62 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -16,16 +16,39 @@
1616

1717
from robocop import __version__, config
1818
from robocop.formatter.formatters import FORMATTERS
19+
from robocop.linter.utils.version_matching import Version
1920
from robocop.run import check_files, format_files
2021
from tests import working_directory
2122

2223
LINTER_TESTS_DIR = Path(__file__).parent.parent / "linter"
2324
TEST_DATA = Path(__file__).parent / "test_data"
25+
FEATURES_MIN_VER = {
26+
"cache": Version("7.1.0"),
27+
}
2428
REPORTS = {}
2529

2630

27-
def performance_report(runs: int = 100):
28-
"""Use as decorator to measure performance of a function and store results."""
31+
def is_feature_enabled(feature: str) -> bool:
    """Tell whether the current Robocop version supports the given feature.

    Used to decide which performance tests can run against a given Robocop
    version. Features without an entry in FEATURES_MIN_VER are treated as
    always available.
    """
    minimum_version = FEATURES_MIN_VER.get(feature)
    if minimum_version is None:
        return True
    return Version(__version__) >= minimum_version
41+
42+
43+
def performance_report(runs: int = 100, cut_off: int = 0):
44+
"""
45+
Use as decorator to measure performance of a function and store results.
46+
47+
Args:
48+
runs: Number of runs to take into account when calculating the average.
49+
cut_off: Number of slowest and fastest runs to exclude from the average.
50+
51+
"""
2952

3053
def decorator(func):
3154
@wraps(func)
@@ -38,18 +61,13 @@ def wrapper(*args, **kwargs):
3861
print(f"Run {run + 1} / {runs} of {func.__name__}")
3962
start = time.perf_counter()
4063
counter = func(*args, **kwargs)
41-
end = time.perf_counter()
42-
time_taken = end - start
64+
time_taken = time.perf_counter() - start
4365
run_times.append(time_taken)
4466
print(f" Execution time: {time_taken:.6f} seconds")
4567
run_times.sort()
46-
cut_off = int(runs * 0.1)
47-
if cut_off + 2 > runs:
48-
cut_off = 0
49-
if len(run_times) > 2:
50-
avg_time = sum(run_times[cut_off:-cut_off]) / (len(run_times) - 2 * cut_off)
51-
else:
52-
avg_time = sum(run_times) / len(run_times)
68+
if cut_off:
69+
run_times = run_times[cut_off:-cut_off]
70+
avg_time = sum(run_times) / len(run_times)
5371
print(f"Mean average execution time over {runs} runs: {avg_time:.6f} seconds")
5472
if report_name:
5573
if func.__name__ not in REPORTS:
@@ -63,7 +81,7 @@ def wrapper(*args, **kwargs):
6381
return decorator
6482

6583

66-
@performance_report(runs=50)
84+
@performance_report(runs=10, cut_off=2)
6785
def project_traversing_report() -> int:
6886
"""
6987
Measure how long it takes to traverse Robocop repository files.
@@ -90,17 +108,17 @@ def project_traversing_report() -> int:
90108
return files_count
91109

92110

93-
@performance_report(runs=50)
94-
def formatter_report(formatter: str, report_name: str, cache: bool = True) -> int: # noqa: ARG001
111+
@performance_report(runs=10, cut_off=2)
def formatter_report(formatter: str, report_name: str, **kwargs) -> int:  # noqa: ARG001
    """Format one formatter's test data and return the number of source entries.

    report_name is consumed by the performance_report decorator when storing
    the timing. Extra kwargs (e.g. cache=...) are forwarded to format_files so
    the same benchmark works on Robocop versions with and without that option.
    """
    main_dir = Path(__file__).parent.parent.parent
    formatter_dir = main_dir / "tests" / "formatter" / "formatters" / formatter
    with working_directory(formatter_dir):
        format_files(["source"], select=[formatter], overwrite=False, return_result=True, silent=True, **kwargs)
    source_dir = formatter_dir / "source"
    # The return value (entries under source/) doubles as a sanity check that
    # the expected test data was actually processed.
    return len(list(source_dir.iterdir()))
101119

102120

103-
@performance_report(runs=10)
121+
@performance_report(runs=5)
104122
def linter_report(report_name: str, **kwargs) -> int: # noqa: ARG001
105123
main_dir = Path(__file__).parent.parent.parent
106124
linter_dir = main_dir / "tests" / "linter"
@@ -109,10 +127,13 @@ def linter_report(report_name: str, **kwargs) -> int: # noqa: ARG001
109127
return len(list(linter_dir.glob("**/*.robot")))
110128

111129

112-
@performance_report(runs=2)
130+
@performance_report(runs=1)
113131
def lint_large_file(report_name: str, lint_dir: Path, **kwargs) -> int: # noqa: ARG001
114132
with working_directory(lint_dir):
115-
check_files(return_result=True, select=["ALL"], cache=False, **kwargs)
133+
if is_feature_enabled("cache"):
134+
check_files(return_result=True, select=["ALL"], cache=False, **kwargs)
135+
else:
136+
check_files(return_result=True, select=["ALL"], **kwargs)
116137
return 1
117138

118139

@@ -135,30 +156,34 @@ def generate_large_file(template_path: Path, output_dir: Path) -> None:
135156
f.write(rendered_content)
136157

137158

138-
if __name__ == "__main__":
139-
# TODO: prepare i.e. nox script to install external robocops and run this script
140-
# So we can generate reports for multiple past versions. It is important since the actual seconds change depending
141-
# on where we run the script from, but the % change between version should be comparable. Also we can use new tests
142-
# on old versions
143-
linter_report(report_name="with_print_cache", cache=True)
144-
linter_report(report_name="with_print_no_cache", cache=False)
145-
linter_report(report_name="without_print_cache", silent=True, cache=True)
146-
linter_report(report_name="without_print_no_cache", silent=True, cache=False)
147-
for formatter in FORMATTERS:
148-
formatter_report(formatter=formatter, report_name=formatter)
149-
formatter_report(formatter=formatter, report_name=f"{formatter}_no_cache", cache=False)
159+
def generate_reports() -> None:
    """Run every benchmark, collecting timings into the module-level REPORTS.

    Older Robocop releases do not support the ``cache`` option (see
    FEATURES_MIN_VER), so the cache/no-cache variants are only benchmarked
    when the feature is available; otherwise only the cache-less variants run.
    """
    if is_feature_enabled("cache"):
        linter_report(report_name="with_print_cache", cache=True)
        linter_report(report_name="with_print_no_cache", cache=False)
        linter_report(report_name="without_print_cache", silent=True, cache=True)
        linter_report(report_name="without_print_no_cache", silent=True, cache=False)
        for formatter in FORMATTERS:
            formatter_report(formatter=formatter, report_name=formatter, cache=True)
            formatter_report(formatter=formatter, report_name=f"{formatter}_no_cache", cache=False)
    else:
        linter_report(report_name="with_print_no_cache")
        linter_report(report_name="without_print_no_cache", silent=True)
        for formatter in FORMATTERS:
            formatter_report(formatter=formatter, report_name=f"{formatter}_no_cache")
    project_traversing_report()
    # Benchmark linting of one very large generated file, with and without
    # console output.
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = Path(temp_dir)
        generate_large_file(TEST_DATA / "large_file.robot", temp_dir)
        lint_large_file(report_name="large_file_with_print", lint_dir=temp_dir)
        lint_large_file(report_name="large_file_without_print", lint_dir=temp_dir, silent=True)
156179

157-
report_path = Path(__file__).parent / "reports" / f"robocop_{__version__.replace('.', '_')}.json"
158-
if report_path.exists():
159-
with open(report_path) as fp:
160-
prev_report = json.load(fp)
161-
REPORTS = merge_dictionaries(prev_report, REPORTS)
162180

163-
with open(report_path, "w") as fp:
164-
json.dump(REPORTS, fp, indent=4)
181+
if __name__ == "__main__":
182+
whole_run_start = time.perf_counter()
183+
report_path = Path(__file__).parent / "reports" / f"robocop_{__version__.replace('.', '_')}.json"
184+
if not report_path.exists():
185+
generate_reports()
186+
print(f"Generating report in {report_path}")
187+
with open(report_path, "w") as fp:
188+
json.dump(REPORTS, fp, indent=4)
189+
print(f"Took {time.perf_counter() - whole_run_start:.2f} seconds to generate report.")

0 commit comments

Comments
 (0)