
Commit 04094f7

ydshieh and nicoddemus authored
Fix skipTest output inside TestCase.subTest (#169)
Previously the output when using `skipTest` inside `TestCase.subTest` was not correct, missing the skipped tests entirely (see #169 for example).

Co-authored-by: ydshieh <[email protected]>
Co-authored-by: Bruno Oliveira <[email protected]>
1 parent 3933305 commit 04094f7
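
For context, a minimal sketch of the pattern this commit fixes, adapted from the tests added in this commit (the file name `repro.py` is illustrative and not part of the commit):

# repro.py -- run with `pytest repro.py -v -rsf` (requires pytest-subtests)
import unittest

class T(unittest.TestCase):
    def test_foo(self):
        for i in range(10):
            with self.subTest("custom message", i=i):
                if i < 4:
                    self.skipTest(f"skip subtest i={i}")
                assert i < 4

Before this fix, the subtests skipped via `skipTest` were missing from the report; with this commit they show up alongside the subtest failures (as SUBSKIP entries when run under pytest).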

File tree

4 files changed: +208 −6 lines


CHANGELOG.rst

Lines changed: 4 additions & 1 deletion
@@ -4,9 +4,12 @@ CHANGELOG
 UNRELEASED
 ----------
 
-* Fix `pytest` requirement to `>=7.3` (`#159`_).
+* Fix output when using ``TestCase.skipTest`` (`#169`_).
+
+* Fix ``pytest`` requirement to ``>=7.3`` (`#159`_).
 
 .. _#159: https://github.com/pytest-dev/pytest-subtests/issues/159
+.. _#169: https://github.com/pytest-dev/pytest-subtests/pull/169
 
 0.13.1 (2024-07-16)
 -------------------

src/pytest_subtests/plugin.py

Lines changed: 57 additions & 0 deletions
@@ -98,6 +98,29 @@ def _from_test_report(cls, test_report: TestReport) -> SubTestReport:
         return super()._from_json(test_report._to_json())
 
 
+def _addSkip(self: TestCaseFunction, testcase: TestCase, reason: str) -> None:
+    from unittest.case import _SubTest  # type: ignore[attr-defined]
+
+    if isinstance(testcase, _SubTest):
+        self._originaladdSkip(testcase, reason)  # type: ignore[attr-defined]
+        if self._excinfo is not None:
+            exc_info = self._excinfo[-1]
+            self.addSubTest(testcase.test_case, testcase, exc_info)  # type: ignore[attr-defined]
+    else:
+        # For python < 3.11: the non-subtest skips have to be added by `_originaladdSkip` only after all subtest
+        # failures are processed by `_addSubTest`.
+        if sys.version_info < (3, 11):
+            subtest_errors = [
+                x
+                for x, y in self.instance._outcome.errors
+                if isinstance(x, _SubTest) and y is not None
+            ]
+            if len(subtest_errors) == 0:
+                self._originaladdSkip(testcase, reason)  # type: ignore[attr-defined]
+        else:
+            self._originaladdSkip(testcase, reason)  # type: ignore[attr-defined]
+
+
 def _addSubTest(
     self: TestCaseFunction,
     test_case: Any,
@@ -122,10 +145,41 @@ def _addSubTest(
                 node=self, call=call_info, report=sub_report
             )
 
+    # For python < 3.11: add non-subtest skips once all subtest failures are processed by `_addSubTest`.
+    if sys.version_info < (3, 11):
+        from unittest.case import _SubTest  # type: ignore[attr-defined]
+
+        non_subtest_skip = [
+            (x, y)
+            for x, y in self.instance._outcome.skipped
+            if not isinstance(x, _SubTest)
+        ]
+        subtest_errors = [
+            (x, y)
+            for x, y in self.instance._outcome.errors
+            if isinstance(x, _SubTest) and y is not None
+        ]
+        # Check if we have non-subtest skips: if there are also sub failures, non-subtest skips are not treated in
+        # `_addSubTest` and have to be added using `_originaladdSkip` after all subtest failures are processed.
+        if len(non_subtest_skip) > 0 and len(subtest_errors) > 0:
+            # Make sure we have processed the last subtest failure
+            last_subset_error = subtest_errors[-1]
+            if exc_info is last_subset_error[-1]:
+                # Add non-subtest skips (as they could not be treated in `_addSkip`)
+                for testcase, reason in non_subtest_skip:
+                    self._originaladdSkip(testcase, reason)  # type: ignore[attr-defined]
+
 
 def pytest_configure(config: pytest.Config) -> None:
     TestCaseFunction.addSubTest = _addSubTest  # type: ignore[attr-defined]
     TestCaseFunction.failfast = False  # type: ignore[attr-defined]
+    # This condition is to prevent `TestCaseFunction._originaladdSkip` being assigned again in a subprocess from a
+    # parent python process where `addSkip` is already `_addSkip`. Such a case is when running tests in
+    # `test_subtests.py` where `pytester.runpytest` is used. Without this guard condition, `_originaladdSkip` is
+    # assigned to `_addSkip`, which is wrong and causes infinite recursion in some cases.
+    if not hasattr(TestCaseFunction, "_originaladdSkip"):
+        TestCaseFunction._originaladdSkip = TestCaseFunction.addSkip  # type: ignore[attr-defined]
+    TestCaseFunction.addSkip = _addSkip  # type: ignore[method-assign]
 
     # Hack (#86): the terminal does not know about the "subtests"
     # status, so it will by default turn the output to yellow.
@@ -154,6 +208,9 @@ def pytest_unconfigure() -> None:
         del TestCaseFunction.addSubTest
     if hasattr(TestCaseFunction, "failfast"):
        del TestCaseFunction.failfast
+    if hasattr(TestCaseFunction, "_originaladdSkip"):
+        TestCaseFunction.addSkip = TestCaseFunction._originaladdSkip  # type: ignore[method-assign]
+        del TestCaseFunction._originaladdSkip
 
 
 @pytest.fixture
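
As a side note, the `pytest_configure`/`pytest_unconfigure` changes above follow a save/patch/restore pattern. A minimal standalone sketch of why the `hasattr` guard matters (illustrative names only; `Target` and `patched_addSkip` are not part of this commit):

# sketch.py -- illustrative only
class Target:
    def addSkip(self, reason):
        print(f"original addSkip: {reason}")

def patched_addSkip(self, reason):
    print(f"patched addSkip: {reason}")
    self._originaladdSkip(reason)  # delegate to the saved original

def configure():
    # Guard: capture the original only once, so running configure twice
    # (e.g. in a subprocess that configures the plugin again) never saves
    # the already-patched method and loops back on itself.
    if not hasattr(Target, "_originaladdSkip"):
        Target._originaladdSkip = Target.addSkip
    Target.addSkip = patched_addSkip

def unconfigure():
    # Restore the original and drop the saved reference.
    if hasattr(Target, "_originaladdSkip"):
        Target.addSkip = Target._originaladdSkip
        del Target._originaladdSkip

Without the guard, a second configure() would save the patched method as the "original", and the delegation inside patched_addSkip would recurse forever.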

tests/test_subtests.py

Lines changed: 147 additions & 0 deletions
@@ -340,6 +340,153 @@ def test_foo(self):
             ["collected 1 item", "* 3 xfailed, 1 passed in *"]
         )
 
+    @pytest.mark.parametrize("runner", ["unittest", "pytest-normal", "pytest-xdist"])
+    def test_skip_with_failure(
+        self,
+        pytester: pytest.Pytester,
+        monkeypatch: pytest.MonkeyPatch,
+        runner: Literal["unittest", "pytest-normal", "pytest-xdist"],
+    ) -> None:
+        monkeypatch.setenv("COLUMNS", "200")
+        p = pytester.makepyfile(
+            """
+            import pytest
+            from unittest import expectedFailure, TestCase, main
+
+            class T(TestCase):
+                def test_foo(self):
+                    for i in range(10):
+                        with self.subTest("custom message", i=i):
+                            if i < 4:
+                                self.skipTest(f"skip subtest i={i}")
+                            assert i < 4
+
+            if __name__ == '__main__':
+                main()
+            """
+        )
+        if runner == "unittest":
+            result = pytester.runpython(p)
+            if sys.version_info < (3, 11):
+                result.stderr.re_match_lines(
+                    [
+                        "FAIL: test_foo \(__main__\.T\) \[custom message\] \(i=4\).*",
+                        "FAIL: test_foo \(__main__\.T\) \[custom message\] \(i=9\).*",
+                        "Ran 1 test in .*",
+                        "FAILED \(failures=6, skipped=4\)",
+                    ]
+                )
+            else:
+                result.stderr.re_match_lines(
+                    [
+                        "FAIL: test_foo \(__main__\.T\.test_foo\) \[custom message\] \(i=4\).*",
+                        "FAIL: test_foo \(__main__\.T\.test_foo\) \[custom message\] \(i=9\).*",
+                        "Ran 1 test in .*",
+                        "FAILED \(failures=6, skipped=4\)",
+                    ]
+                )
+        elif runner == "pytest-normal":
+            result = pytester.runpytest(p, "-v", "-rsf")
+            result.stdout.re_match_lines(
+                [
+                    r"test_skip_with_failure.py::T::test_foo \[custom message\] \(i=0\) SUBSKIP \(skip subtest i=0\) .*",
+                    r"test_skip_with_failure.py::T::test_foo \[custom message\] \(i=3\) SUBSKIP \(skip subtest i=3\) .*",
+                    r"test_skip_with_failure.py::T::test_foo \[custom message\] \(i=4\) SUBFAIL .*",
+                    r"test_skip_with_failure.py::T::test_foo \[custom message\] \(i=9\) SUBFAIL .*",
+                    "test_skip_with_failure.py::T::test_foo PASSED .*",
+                    "[custom message] (i=0) SUBSKIP [1] test_skip_with_failure.py:5: skip subtest i=0",
+                    "[custom message] (i=0) SUBSKIP [1] test_skip_with_failure.py:5: skip subtest i=3",
+                    "[custom message] (i=4) SUBFAIL test_skip_with_failure.py::T::test_foo - AssertionError: assert 4 < 4",
+                    "[custom message] (i=9) SUBFAIL test_skip_with_failure.py::T::test_foo - AssertionError: assert 9 < 4",
+                    ".* 6 failed, 1 passed, 4 skipped in .*",
+                ]
+            )
+        else:
+            pytest.xfail("Not producing the expected results (#5)")
+            result = pytester.runpytest(p)  # type:ignore[unreachable]
+            result.stdout.fnmatch_lines(
+                ["collected 1 item", "* 3 skipped, 1 passed in *"]
+            )
+
+    @pytest.mark.parametrize("runner", ["unittest", "pytest-normal", "pytest-xdist"])
+    def test_skip_with_failure_and_non_subskip(
+        self,
+        pytester: pytest.Pytester,
+        monkeypatch: pytest.MonkeyPatch,
+        runner: Literal["unittest", "pytest-normal", "pytest-xdist"],
+    ) -> None:
+        monkeypatch.setenv("COLUMNS", "200")
+        p = pytester.makepyfile(
+            """
+            import pytest
+            from unittest import expectedFailure, TestCase, main
+
+            class T(TestCase):
+                def test_foo(self):
+                    for i in range(10):
+                        with self.subTest("custom message", i=i):
+                            if i < 4:
+                                self.skipTest(f"skip subtest i={i}")
+                            assert i < 4
+                    self.skipTest(f"skip the test")
+
+            if __name__ == '__main__':
+                main()
+            """
+        )
+        if runner == "unittest":
+            result = pytester.runpython(p)
+            if sys.version_info < (3, 11):
+                result.stderr.re_match_lines(
+                    [
+                        "FAIL: test_foo \(__main__\.T\) \[custom message\] \(i=4\).*",
+                        "FAIL: test_foo \(__main__\.T\) \[custom message\] \(i=9\).*",
+                        "Ran 1 test in .*",
+                        "FAILED \(failures=6, skipped=5\)",
+                    ]
+                )
+            else:
+                result.stderr.re_match_lines(
+                    [
+                        "FAIL: test_foo \(__main__\.T\.test_foo\) \[custom message\] \(i=4\).*",
+                        "FAIL: test_foo \(__main__\.T\.test_foo\) \[custom message\] \(i=9\).*",
+                        "Ran 1 test in .*",
+                        "FAILED \(failures=6, skipped=5\)",
+                    ]
+                )
+        elif runner == "pytest-normal":
+            result = pytester.runpytest(p, "-v", "-rsf")
+            # The `(i=0)` is not correct but it's given by pytest `TerminalReporter` without `--no-fold-skipped`
+            result.stdout.re_match_lines(
+                [
+                    r"test_skip_with_failure_and_non_subskip.py::T::test_foo \[custom message\] \(i=4\) SUBFAIL .*",
+                    r"test_skip_with_failure_and_non_subskip.py::T::test_foo SKIPPED \(skip the test\)",
+                    r"\[custom message\] \(i=0\) SUBSKIP \[1\] test_skip_with_failure_and_non_subskip.py:5: skip subtest i=3",
+                    r"\[custom message\] \(i=0\) SUBSKIP \[1\] test_skip_with_failure_and_non_subskip.py:5: skip the test",
+                    r"\[custom message\] \(i=4\) SUBFAIL test_skip_with_failure_and_non_subskip.py::T::test_foo",
+                    r".* 6 failed, 5 skipped in .*",
+                ]
+            )
+            # check with `--no-fold-skipped` (which gives the correct information)
+            if sys.version_info >= (3, 10):
+                result = pytester.runpytest(p, "-v", "--no-fold-skipped", "-rsf")
+                result.stdout.re_match_lines(
+                    [
+                        r"test_skip_with_failure_and_non_subskip.py::T::test_foo \[custom message\] \(i=4\) SUBFAIL .*",
+                        r"test_skip_with_failure_and_non_subskip.py::T::test_foo SKIPPED \(skip the test\).*",
+                        r"\[custom message\] \(i=3\) SUBSKIP test_skip_with_failure_and_non_subskip.py::T::test_foo - Skipped: skip subtest i=3",
+                        r"SKIPPED test_skip_with_failure_and_non_subskip.py::T::test_foo - Skipped: skip the test",
+                        r"\[custom message\] \(i=4\) SUBFAIL test_skip_with_failure_and_non_subskip.py::T::test_foo",
+                        r".* 6 failed, 5 skipped in .*",
+                    ]
+                )
+        else:
+            pytest.xfail("Not producing the expected results (#5)")
+            result = pytester.runpytest(p)  # type:ignore[unreachable]
+            result.stdout.fnmatch_lines(
+                ["collected 1 item", "* 3 skipped, 1 passed in *"]
+            )
+
 
 class TestCapture:
     def create_file(self, pytester: pytest.Pytester) -> None:

tox.ini

Lines changed: 0 additions & 5 deletions
@@ -2,11 +2,6 @@
 envlist = py38,py39,py310,py311,py312
 
 [testenv]
-passenv =
-    USER
-    USERNAME
-    TRAVIS
-    PYTEST_ADDOPTS
 deps =
     pytest-xdist>=3.3.0
 
