Skip to content

Commit 21311b0

Browse files
authored
fix(ci-insights): Always fill test metrics from reports (#347)
The goal of this change is to ensure we always fill metrics from a test report. Before this, an early return skipped the recording for the setup and teardown phases, leading to incomplete metrics (e.g. total_duration).
1 parent 481fbf3 commit 21311b0

File tree

3 files changed

+41
-33
lines changed

3 files changed

+41
-33
lines changed

pytest_mergify/__init__.py

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -352,6 +352,9 @@ def pytest_exception_interact(
352352
)
353353

354354
def pytest_runtest_logreport(self, report: _pytest.reports.TestReport) -> None:
355+
if self.mergify_ci.flaky_detector:
356+
self.mergify_ci.flaky_detector.try_fill_metrics_from_report(report)
357+
355358
if self.tracer is None:
356359
return
357360

@@ -377,15 +380,13 @@ def pytest_runtest_logreport(self, report: _pytest.reports.TestReport) -> None:
377380
}
378381
)
379382

380-
if not self.mergify_ci.flaky_detector:
381-
return
382-
383-
if not self.mergify_ci.flaky_detector.try_fill_metrics_from_report(report):
384-
return
385-
386-
test_span.set_attributes({"cicd.test.flaky_detection": True})
387-
if self.mergify_ci.flaky_detector.mode == "new":
388-
test_span.set_attributes({"cicd.test.new": True})
383+
if (
384+
self.mergify_ci.flaky_detector
385+
and self.mergify_ci.flaky_detector.is_rerunning_test(report.nodeid)
386+
):
387+
test_span.set_attributes({"cicd.test.flaky_detection": True})
388+
if self.mergify_ci.flaky_detector.mode == "new":
389+
test_span.set_attributes({"cicd.test.new": True})
389390

390391

391392
def pytest_addoption(parser: _pytest.config.argparsing.Parser) -> None:

pytest_mergify/flaky_detection.py

Lines changed: 20 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -179,23 +179,31 @@ def _fetch_context(self) -> _FlakyDetectionContext:
179179

180180
return result
181181

182-
def try_fill_metrics_from_report(self, report: _pytest.reports.TestReport) -> bool:
183-
if report.outcome not in ["failed", "passed", "rerun"]:
184-
return False
185-
182+
def try_fill_metrics_from_report(self, report: _pytest.reports.TestReport) -> None:
186183
test = report.nodeid
187184

185+
if report.outcome == "skipped":
186+
# Remove metrics for skipped tests. Setup phase may have passed and
187+
# initialized metrics before call phase was skipped.
188+
self._test_metrics.pop(test, None)
189+
return
190+
188191
if test not in self._tests_to_process:
189-
return False
192+
return
190193

191194
if len(test) > self._context.max_test_name_length:
192195
self._over_length_tests.add(test)
193-
return False
196+
return
197+
198+
if test not in self._test_metrics:
199+
if report.when != "setup":
200+
# Metrics have been removed (e.g. for a skipped test), do nothing.
201+
return
194202

195-
metrics = self._test_metrics.setdefault(test, _TestMetrics())
196-
metrics.fill_from_report(report)
203+
# Initialize metrics after setup phase.
204+
self._test_metrics[test] = _TestMetrics()
197205

198-
return True
206+
self._test_metrics[test].fill_from_report(report)
199207

200208
def prepare_for_session(self, session: _pytest.main.Session) -> None:
201209
tests_in_session = {item.nodeid for item in session.items}
@@ -242,7 +250,9 @@ def is_test_too_slow(self, test: str) -> bool:
242250
)
243251

244252
def is_rerunning_test(self, test: str) -> bool:
245-
return test in self._test_metrics
253+
return (
254+
metrics := self._test_metrics.get(test)
255+
) is not None and metrics.rerun_count >= 1
246256

247257
def is_last_rerun_for_test(self, test: str) -> bool:
248258
metrics = self._test_metrics[test]

tests/test_flaky_detection.py

Lines changed: 11 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
import freezegun
77
import pytest
88

9+
import pytest_mergify
910
from pytest_mergify import flaky_detection
1011

1112
_NOW = datetime.datetime(
@@ -73,23 +74,19 @@ def make_report(
7374
detector._context = _make_flaky_detection_context(max_test_name_length=100)
7475
detector._tests_to_process = ["foo"]
7576

76-
detector.try_fill_metrics_from_report(
77-
make_report(nodeid="foo", when="setup", duration=1)
78-
)
79-
detector.try_fill_metrics_from_report(
80-
make_report(nodeid="foo", when="call", duration=2)
81-
)
82-
detector.try_fill_metrics_from_report(
77+
plugin = pytest_mergify.PytestMergify()
78+
plugin.mergify_ci = pytest_mergify.ci_insights.MergifyCIInsights()
79+
plugin.mergify_ci.flaky_detector = detector
80+
81+
plugin.pytest_runtest_logreport(make_report(nodeid="foo", when="setup", duration=1))
82+
plugin.pytest_runtest_logreport(make_report(nodeid="foo", when="call", duration=2))
83+
plugin.pytest_runtest_logreport(
8384
make_report(nodeid="foo", when="teardown", duration=3)
8485
)
8586

86-
detector.try_fill_metrics_from_report(
87-
make_report(nodeid="foo", when="setup", duration=4)
88-
)
89-
detector.try_fill_metrics_from_report(
90-
make_report(nodeid="foo", when="call", duration=5)
91-
)
92-
detector.try_fill_metrics_from_report(
87+
plugin.pytest_runtest_logreport(make_report(nodeid="foo", when="setup", duration=4))
88+
plugin.pytest_runtest_logreport(make_report(nodeid="foo", when="call", duration=5))
89+
plugin.pytest_runtest_logreport(
9390
make_report(nodeid="foo", when="teardown", duration=6)
9491
)
9592

0 commit comments

Comments
 (0)