Commit 291b7bb

Merge branch 'develop' into doc

2 parents: b945b95 + 6835cf4

8 files changed: 101 additions & 76 deletions

CHANGES.rst

Lines changed: 3 additions & 0 deletions

@@ -9,10 +9,13 @@ Bug fixes and minor changes
 
 + `#40`_: add logging.
 + `#50`_, `#51`_: test suite incompatibility with pytest 6.2.0.
++ `#58`_: declare the type of automark_dependency ini-option correctly
+  as bool.
 
 .. _#40: https://github.com/RKrahl/pytest-dependency/issues/40
 .. _#50: https://github.com/RKrahl/pytest-dependency/issues/50
 .. _#51: https://github.com/RKrahl/pytest-dependency/pull/51
+.. _#58: https://github.com/RKrahl/pytest-dependency/pull/58
 
 0.5.1 (2020-02-14)
 ~~~~~~~~~~~~~~~~~~

src/pytest_dependency.py

Lines changed: 2 additions & 16 deletions

@@ -11,20 +11,6 @@
 _ignore_unknown = False
 
 
-def _get_bool(value):
-    """Evaluate string representation of a boolean value.
-    """
-    if value:
-        if value.lower() in ["0", "no", "n", "false", "f", "off"]:
-            return False
-        elif value.lower() in ["1", "yes", "y", "true", "t", "on"]:
-            return True
-        else:
-            raise ValueError("Invalid truth value '%s'" % value)
-    else:
-        return False
-
-
 class DependencyItemStatus(object):
     """Status of a test item in a dependency manager.
     """

@@ -142,15 +128,15 @@ def depends(request, other, scope='module'):
 def pytest_addoption(parser):
     parser.addini("automark_dependency",
                   "Add the dependency marker to all tests automatically",
-                  default=False)
+                  type="bool", default=False)
     parser.addoption("--ignore-unknown-dependency",
                      action="store_true", default=False,
                      help="ignore dependencies whose outcome is not known")
 
 
 def pytest_configure(config):
     global _automark, _ignore_unknown
-    _automark = _get_bool(config.getini("automark_dependency"))
+    _automark = config.getini("automark_dependency")
     _ignore_unknown = config.getoption("--ignore-unknown-dependency")
     config.addinivalue_line("markers",
                             "dependency(name=None, depends=[]): "

tests/conftest.py

Lines changed: 0 additions & 7 deletions

@@ -1,17 +1,10 @@
-import packaging.version
 from pathlib import Path
 import pytest
 
 
 pytest_plugins = "pytester"
 
 
 example_dir = (Path(__file__).parent / "../doc/examples").resolve()
 
-def require_pytest_version(minversion):
-    pytest_version = packaging.version.parse(pytest.__version__)
-    if pytest_version < packaging.version.parse(minversion):
-        pytest.skip("need pytest version %s or newer" % minversion,
-                    allow_module_level=True)
-
 def get_example(fname):
     path = example_dir / fname
     assert path.is_file()
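With the module-level version gate gone (and the packaging dependency with it), the example tests below adapt to the running pytest at call time instead of skipping whole modules. A sketch of the fallback idiom they use, with a hypothetical wrapper name:

    def assert_outcomes_compat(result, xfailed=0, **outcomes):
        # Older pytest's RunResult.assert_outcomes() has no xfailed
        # keyword and raises TypeError; retry without it in that case.
        try:
            result.assert_outcomes(xfailed=xfailed, **outcomes)
        except TypeError:
            result.assert_outcomes(**outcomes)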

tests/test_04_automark.py

Lines changed: 12 additions & 6 deletions

@@ -29,7 +29,10 @@ def test_b():
     """)
 
 
-def test_set_false(ctestdir):
+@pytest.mark.parametrize(
+    "false_value", ["0", "no", "n", "False", "false", "f", "off"]
+)
+def test_set_false(ctestdir, false_value):
     """A pytest.ini is present, automark_dependency is set to false.
 
     Since automark_dependency is set to false and test_a is not

@@ -38,9 +41,9 @@ def test_set_false(ctestdir):
     """
     ctestdir.makefile('.ini', pytest="""
         [pytest]
-        automark_dependency = false
+        automark_dependency = %s
         console_output_style = classic
-    """)
+    """ % false_value)
     ctestdir.makepyfile("""
         import pytest
 

@@ -59,7 +62,10 @@ def test_b():
     """)
 
 
-def test_set_true(ctestdir):
+@pytest.mark.parametrize(
+    "true_value", ["1", "yes", "y", "True", "true", "t", "on"]
+)
+def test_set_true(ctestdir, true_value):
     """A pytest.ini is present, automark_dependency is set to false.
 
     Since automark_dependency is set to true, the outcome of test_a

@@ -68,9 +74,9 @@ def test_set_true(ctestdir):
     """
     ctestdir.makefile('.ini', pytest="""
         [pytest]
-        automark_dependency = true
+        automark_dependency = %s
         console_output_style = classic
-    """)
+    """ % true_value)
     ctestdir.makepyfile("""
         import pytest
 
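The parametrized strings cover the truth values accepted for a bool ini option; the set appears to follow distutils.util.strtobool semantics. A quick sanity check under that assumption (distutils is deprecated since Python 3.10 and removed in 3.12):

    from distutils.util import strtobool

    # strtobool() lowercases its argument, so "False" and "True" also work.
    for s in ["0", "no", "n", "False", "false", "f", "off"]:
        assert strtobool(s) == 0
    for s in ["1", "yes", "y", "True", "true", "t", "on"]:
        assert strtobool(s) == 1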

tests/test_09_examples_advanced.py

Lines changed: 29 additions & 16 deletions

@@ -2,9 +2,7 @@
 """
 
 import pytest
-from conftest import get_example, require_pytest_version
-
-require_pytest_version("4.2.0")
+from conftest import get_example
 
 
 def test_dyn_parametrized(ctestdir):

@@ -13,7 +11,10 @@ def test_dyn_parametrized(ctestdir):
     with get_example("dyn-parametrized.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=11, skipped=1, failed=0, xfailed=1)
+    try:
+        result.assert_outcomes(passed=11, skipped=1, failed=0, xfailed=1)
+    except TypeError:
+        result.assert_outcomes(passed=11, skipped=1, failed=0)
     result.stdout.re_match_lines(r"""
         .*::test_child\[c0\] PASSED
         .*::test_child\[c1\] PASSED

@@ -22,7 +23,7 @@ def test_dyn_parametrized(ctestdir):
         .*::test_child\[c4\] PASSED
         .*::test_child\[c5\] PASSED
         .*::test_child\[c6\] PASSED
-        .*::test_child\[c7\] XFAIL(?:\s+\(.*\))?
+        .*::test_child\[c7\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_child\[c8\] PASSED
         .*::test_parent\[p0\] PASSED
         .*::test_parent\[p1\] PASSED

@@ -37,7 +38,10 @@ def test_group_fixture1(ctestdir):
     with get_example("group-fixture.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=16, skipped=1, failed=0, xfailed=1)
+    try:
+        result.assert_outcomes(passed=16, skipped=1, failed=0, xfailed=1)
+    except TypeError:
+        result.assert_outcomes(passed=16, skipped=1, failed=0)
     result.stdout.re_match_lines(r"""
         .*::test_a\[1\] PASSED
         .*::test_b\[1\] PASSED

@@ -51,7 +55,7 @@ def test_group_fixture1(ctestdir):
         .*::test_b\[5\] PASSED
         .*::test_a\[6\] PASSED
         .*::test_b\[6\] PASSED
-        .*::test_a\[7\] XFAIL(?:\s+\(.*\))?
+        .*::test_a\[7\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_b\[7\] SKIPPED(?:\s+\(.*\))?
         .*::test_a\[8\] PASSED
         .*::test_b\[8\] PASSED

@@ -66,7 +70,10 @@ def test_group_fixture2(ctestdir):
     with get_example("group-fixture2.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=24, skipped=2, failed=0, xfailed=1)
+    try:
+        result.assert_outcomes(passed=24, skipped=2, failed=0, xfailed=1)
+    except TypeError:
+        result.assert_outcomes(passed=24, skipped=2, failed=0)
     result.stdout.re_match_lines(r"""
         .*::test_a\[1\] PASSED
         .*::test_b\[1\] PASSED

@@ -86,7 +93,7 @@ def test_group_fixture2(ctestdir):
         .*::test_a\[6\] PASSED
         .*::test_b\[6\] PASSED
         .*::test_c\[6\] PASSED
-        .*::test_a\[7\] XFAIL(?:\s+\(.*\))?
+        .*::test_a\[7\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_b\[7\] SKIPPED(?:\s+\(.*\))?
         .*::test_c\[7\] SKIPPED(?:\s+\(.*\))?
         .*::test_a\[8\] PASSED

@@ -104,7 +111,10 @@ def test_all_params(ctestdir):
     with get_example("all_params.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=20, skipped=3, failed=0, xfailed=3)
+    try:
+        result.assert_outcomes(passed=20, skipped=3, failed=0, xfailed=3)
+    except TypeError:
+        result.assert_outcomes(passed=20, skipped=3, failed=0)
     result.stdout.re_match_lines(r"""
         .*::test_a\[0\] PASSED
         .*::test_a\[1\] PASSED

@@ -119,18 +129,18 @@ def test_all_params(ctestdir):
         .*::test_a\[10\] PASSED
         .*::test_a\[11\] PASSED
         .*::test_a\[12\] PASSED
-        .*::test_a\[13\] XFAIL(?:\s+\(.*\))?
+        .*::test_a\[13\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_a\[14\] PASSED
         .*::test_a\[15\] PASSED
         .*::test_a\[16\] PASSED
         .*::test_b SKIPPED(?:\s+\(.*\))?
         .*::test_c\[0-2\] PASSED
         .*::test_c\[2-3\] PASSED
         .*::test_c\[4-4\] PASSED
-        .*::test_c\[6-5\] XFAIL(?:\s+\(.*\))?
+        .*::test_c\[6-5\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_d SKIPPED(?:\s+\(.*\))?
         .*::test_e\[abc\] PASSED
-        .*::test_e\[def\] XFAIL(?:\s+\(.*\))?
+        .*::test_e\[def\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_f SKIPPED(?:\s+\(.*\))?
     """)
 

@@ -141,12 +151,15 @@ def test_or_dependency(ctestdir):
     with get_example("or_dependency.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=5, skipped=1, failed=0, xfailed=2)
+    try:
+        result.assert_outcomes(passed=5, skipped=1, failed=0, xfailed=2)
+    except TypeError:
+        result.assert_outcomes(passed=5, skipped=1, failed=0)
     result.stdout.re_match_lines(r"""
         .*::test_ap PASSED
-        .*::test_ax XFAIL(?:\s+\(.*\))?
+        .*::test_ax (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_bp PASSED
-        .*::test_bx XFAIL(?:\s+\(.*\))?
+        .*::test_bx (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_c SKIPPED(?:\s+\(.*\))?
         .*::test_d PASSED
         .*::test_e PASSED
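The widened patterns accept both spellings of the expected-failure label: recent pytest prints an uppercase XFAIL, optionally followed by a parenthesized reason, while older releases print a lowercase xfail in verbose output. A quick check of the alternation:

    import re

    pat = re.compile(r".*::test_ax (?:XFAIL(?:\s+\(.*\))?|xfail)")
    # Matches the newer uppercase label with a reason ...
    assert pat.match("test_mod.py::test_ax XFAIL (deliberate fail)")
    # ... and the lowercase label from older pytest releases.
    assert pat.match("test_mod.py::test_ax xfail")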

tests/test_09_examples_names.py

Lines changed: 6 additions & 5 deletions

@@ -2,9 +2,7 @@
 """
 
 import pytest
-from conftest import get_example, require_pytest_version
-
-require_pytest_version("4.2.0")
+from conftest import get_example
 
 
 def test_nodeid(ctestdir):

@@ -13,12 +11,15 @@ def test_nodeid(ctestdir):
     with get_example("nodeid.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=6, skipped=0, failed=0, xfailed=1)
+    try:
+        result.assert_outcomes(passed=6, skipped=0, failed=0, xfailed=1)
+    except TypeError:
+        result.assert_outcomes(passed=6, skipped=0, failed=0)
     result.stdout.re_match_lines(r"""
         .*::test_a PASSED
         .*::test_b\[7-True\] PASSED
         .*::test_b\[0-False\] PASSED
-        .*::test_b\[-1-False\] XFAIL(?:\s+\(.*\))?
+        .*::test_b\[-1-False\] (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::TestClass::test_c PASSED
         .*::TestClass::test_d\[order\] PASSED
         .*::TestClass::test_d\[disorder\] PASSED

tests/test_09_examples_scope.py

Lines changed: 17 additions & 10 deletions

@@ -3,9 +3,7 @@
 
 from pathlib import Path
 import pytest
-from conftest import get_example, require_pytest_version
-
-require_pytest_version("4.2.0")
+from conftest import get_example
 
 
 def test_scope_module(ctestdir):

@@ -14,9 +12,12 @@ def test_scope_module(ctestdir):
     with get_example("scope_module.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=2, skipped=2, failed=0, xfailed=1)
+    try:
+        result.assert_outcomes(passed=2, skipped=2, failed=0, xfailed=1)
+    except TypeError:
+        result.assert_outcomes(passed=2, skipped=2, failed=0)
     result.stdout.re_match_lines(r"""
-        .*::test_a XFAIL(?:\s+\(.*\))?
+        .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::test_b PASSED
         .*::test_c SKIPPED(?:\s+\(.*\))?
         .*::test_d PASSED

@@ -36,13 +37,16 @@ def test_scope_session(ctestdir):
     with (subdir / "test_mod_02.py").open("wt") as df:
         df.write(sf.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=5, skipped=1, failed=0, xfailed=2)
+    try:
+        result.assert_outcomes(passed=5, skipped=1, failed=0, xfailed=2)
+    except TypeError:
+        result.assert_outcomes(passed=5, skipped=1, failed=0)
     result.stdout.re_match_lines(r"""
         tests/test_mod_01.py::test_a PASSED
-        tests/test_mod_01.py::test_b XFAIL(?:\s+\(.*\))?
+        tests/test_mod_01.py::test_b (?:XFAIL(?:\s+\(.*\))?|xfail)
         tests/test_mod_01.py::test_c PASSED
         tests/test_mod_01.py::TestClass::test_b PASSED
-        tests/test_mod_02.py::test_a XFAIL(?:\s+\(.*\))?
+        tests/test_mod_02.py::test_a (?:XFAIL(?:\s+\(.*\))?|xfail)
         tests/test_mod_02.py::test_e PASSED
         tests/test_mod_02.py::test_f SKIPPED(?:\s+\(.*\))?
         tests/test_mod_02.py::test_g PASSED

@@ -55,9 +59,12 @@ def test_scope_class(ctestdir):
     with get_example("scope_class.py").open("rt") as f:
         ctestdir.makepyfile(f.read())
     result = ctestdir.runpytest("--verbose")
-    result.assert_outcomes(passed=3, skipped=2, failed=0, xfailed=1)
+    try:
+        result.assert_outcomes(passed=3, skipped=2, failed=0, xfailed=1)
+    except TypeError:
+        result.assert_outcomes(passed=3, skipped=2, failed=0)
     result.stdout.re_match_lines(r"""
-        .*::test_a XFAIL(?:\s+\(.*\))?
+        .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail)
         .*::TestClass1::test_b PASSED
         .*::TestClass2::test_a PASSED
         .*::TestClass2::test_c SKIPPED(?:\s+\(.*\))?
