Skip to content

Commit 4ed412e

Browse files
committed
unittest's unexpectedSuccess should work as non-strict xpass
Make sure tests for that behavior obtain the same return code using either pytest or unittest to run the same file
1 parent dfc659f commit 4ed412e

File tree

2 files changed

+61
-22
lines changed

2 files changed

+61
-22
lines changed

_pytest/skipping.py

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -220,6 +220,18 @@ def check_strict_xfail(pyfuncitem):
220220
pytest.fail('[XPASS(strict)] ' + explanation, pytrace=False)
221221

222222

223+
def _is_unittest_unexpected_success_a_failure():
224+
"""Return True if the test suite should fail when an @expectedFailure unittest test PASSES.
225+
226+
From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful:
227+
Changed in version 3.4: Returns False if there were any
228+
unexpectedSuccesses from tests marked with the expectedFailure() decorator.
229+
230+
TODO: this should be moved to the "compat" module.
231+
"""
232+
return sys.version_info >= (3, 4)
233+
234+
223235
@pytest.hookimpl(hookwrapper=True)
224236
def pytest_runtest_makereport(item, call):
225237
outcome = yield
@@ -228,13 +240,15 @@ def pytest_runtest_makereport(item, call):
228240
evalskip = getattr(item, '_evalskip', None)
229241
# unittest special case, see setting of _unexpectedsuccess
230242
if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
231-
# unittest treats an 'unexpected successes' as a failure
232-
# which means pytest needs to handle it like a 'xfail(strict=True)'
233-
rep.outcome = "failed"
234243
if item._unexpectedsuccess:
235244
rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
236245
else:
237246
rep.longrepr = "Unexpected success"
247+
if _is_unittest_unexpected_success_a_failure():
248+
rep.outcome = "failed"
249+
else:
250+
rep.outcome = "passed"
251+
rep.wasxfail = rep.longrepr
238252
elif item.config.option.runxfail:
239253
pass # don't interfere
240254
elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):

testing/test_unittest.py

Lines changed: 44 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -419,8 +419,9 @@ def setup_class(cls):
419419
def test_method(self):
420420
pass
421421
""")
422+
from _pytest.skipping import _is_unittest_unexpected_success_a_failure
423+
should_fail = _is_unittest_unexpected_success_a_failure()
422424
result = testdir.runpytest("-rxs")
423-
assert result.ret == 0
424425
result.stdout.fnmatch_lines_random([
425426
"*XFAIL*test_trial_todo*",
426427
"*trialselfskip*",
@@ -429,8 +430,9 @@ def test_method(self):
429430
"*i2wanto*",
430431
"*sys.version_info*",
431432
"*skip_in_method*",
432-
"*4 skipped*3 xfail*1 xpass*",
433+
"*1 failed*4 skipped*3 xfailed*" if should_fail else "*4 skipped*3 xfail*1 xpass*",
433434
])
435+
assert result.ret == (1 if should_fail else 0)
434436

435437
def test_trial_error(self, testdir):
436438
testdir.makepyfile("""
@@ -587,39 +589,62 @@ def test_hello(self, arg1):
587589
assert "TypeError" in result.stdout.str()
588590
assert result.ret == 1
589591

592+
590593
@pytest.mark.skipif("sys.version_info < (2,7)")
591-
def test_unittest_expected_failure_for_failing_test_is_xfail(testdir):
592-
testdir.makepyfile("""
594+
@pytest.mark.parametrize('runner', ['pytest', 'unittest'])
595+
def test_unittest_expected_failure_for_failing_test_is_xfail(testdir, runner):
596+
script = testdir.makepyfile("""
593597
import unittest
594598
class MyTestCase(unittest.TestCase):
595599
@unittest.expectedFailure
596600
def test_failing_test_is_xfail(self):
597601
assert False
602+
if __name__ == '__main__':
603+
unittest.main()
598604
""")
599-
result = testdir.runpytest("-rxX")
600-
result.stdout.fnmatch_lines([
601-
"*XFAIL*MyTestCase*test_failing_test_is_xfail*",
602-
"*1 xfailed*",
603-
])
605+
if runner == 'pytest':
606+
result = testdir.runpytest("-rxX")
607+
result.stdout.fnmatch_lines([
608+
"*XFAIL*MyTestCase*test_failing_test_is_xfail*",
609+
"*1 xfailed*",
610+
])
611+
else:
612+
result = testdir.runpython(script)
613+
result.stderr.fnmatch_lines([
614+
"*1 test in*",
615+
"*OK*(expected failures=1)*",
616+
])
604617
assert result.ret == 0
605618

619+
606620
@pytest.mark.skipif("sys.version_info < (2,7)")
607-
def test_unittest_expected_failure_for_passing_test_is_fail(testdir):
608-
testdir.makepyfile("""
621+
@pytest.mark.parametrize('runner', ['pytest', 'unittest'])
622+
def test_unittest_expected_failure_for_passing_test_is_fail(testdir, runner):
623+
script = testdir.makepyfile("""
609624
import unittest
610625
class MyTestCase(unittest.TestCase):
611626
@unittest.expectedFailure
612627
def test_passing_test_is_fail(self):
613628
assert True
629+
if __name__ == '__main__':
630+
unittest.main()
614631
""")
615-
result = testdir.runpytest("-rxX")
616-
result.stdout.fnmatch_lines([
617-
"*FAILURES*",
618-
"*MyTestCase*test_passing_test_is_fail*",
619-
"*Unexpected success*",
620-
"*1 failed*",
621-
])
622-
assert result.ret == 1
632+
from _pytest.skipping import _is_unittest_unexpected_success_a_failure
633+
should_fail = _is_unittest_unexpected_success_a_failure()
634+
if runner == 'pytest':
635+
result = testdir.runpytest("-rxX")
636+
result.stdout.fnmatch_lines([
637+
"*MyTestCase*test_passing_test_is_fail*",
638+
"*1 failed*" if should_fail else "*1 xpassed*",
639+
])
640+
else:
641+
result = testdir.runpython(script)
642+
result.stderr.fnmatch_lines([
643+
"*1 test in*",
644+
"*(unexpected successes=1)*",
645+
])
646+
647+
assert result.ret == (1 if should_fail else 0)
623648

624649

625650
@pytest.mark.parametrize('fix_type, stmt', [

0 commit comments

Comments
 (0)