Commit 5e7804c

Use subtests when running sanity checks.
A subtest in Python's unittest keeps running the remaining examples (tests, cases, whatever) and reports an error for each failing one, rather than simply stopping at the first invalid one. This should make it easier to see everything that's wrong in a CI run without having to fix and re-run repeatedly. A further improvement would be grouping tests "horizontally" across versions rather than just within a version, since authors often copy-paste a test into every version it applies to. With a subtest they'll see N failures for N versions, whereas it would be better to see a single failure for, e.g., a test description that is too long across all N versions.
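
For illustration only (this snippet is not part of the commit), here is a minimal standalone sketch of the subtest behaviour described above, using the same with self.subTest(...) pattern the diff below introduces; the descriptions and the 70-character limit are invented to mirror the length sanity check:

    import unittest

    class Example(unittest.TestCase):
        def test_descriptions_are_short(self):
            descriptions = ["short enough", "x" * 80, "also fine", "y" * 90]
            for description in descriptions:
                # Each iteration is its own subtest: a failure is recorded
                # and the loop carries on with the next description.
                with self.subTest(description=description):
                    self.assertLess(len(description), 70)

    if __name__ == "__main__":
        unittest.main()

Run with python -m unittest, this reports two separate failures (one per over-long description) instead of aborting at the first, which is what lets a single CI run list everything that needs fixing.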
1 parent 8ade923 commit 5e7804c

File tree

1 file changed (+48, -33 lines)


bin/jsonschema_suite

Lines changed: 48 additions & 33 deletions
@@ -40,14 +40,14 @@ def files(paths):
     Each test file in the provided paths, as an array of test cases.
     """
     for path in paths:
-        yield json.loads(path.read_text())
+        yield path, json.loads(path.read_text())


 def cases(paths):
     """
     Each test case within each file in the provided paths.
     """
-    for test_file in files(paths):
+    for _, test_file in files(paths):
         yield from test_file


@@ -82,50 +82,62 @@ class SanityTests(unittest.TestCase):
         assert cls.remote_files, "Didn't find the remote files!"
         print(f"Found {len(cls.remote_files)} remote files")

+    def assertUnique(self, iterable):
+        """
+        Assert that the elements of an iterable are unique.
+        """
+
+        seen, duplicated = set(), set()
+        for each in iterable:
+            if each in seen:
+                duplicated.add(each)
+            seen.add(each)
+        self.assertFalse(duplicated, "Elements are not unique.")
+
     def test_all_test_files_are_valid_json(self):
         """
         All test files contain valid JSON.
         """
         for path in self.test_files:
-            try:
-                json.loads(path.read_text())
-            except ValueError as error:
-                self.fail(f"{path} contains invalid JSON ({error})")
+            with self.subTest(path=path):
+                try:
+                    json.loads(path.read_text())
+                except ValueError as error:
+                    self.fail(f"{path} contains invalid JSON ({error})")

     def test_all_remote_files_are_valid_json(self):
         """
         All remote files contain valid JSON.
         """
         for path in self.remote_files:
-            try:
-                json.loads(path.read_text())
-            except ValueError as error:
-                self.fail(f"{path} contains invalid JSON ({error})")
+            with self.subTest(path=path):
+                try:
+                    json.loads(path.read_text())
+                except ValueError as error:
+                    self.fail(f"{path} contains invalid JSON ({error})")

     def test_all_descriptions_have_reasonable_length(self):
         """
         All tests have reasonably long descriptions.
         """
         for count, test in enumerate(tests(self.test_files)):
-            description = test["description"]
-            self.assertLess(
-                len(description),
-                70,
-                f"{description!r} is too long! (keep it to less than 70 chars)"
-            )
+            with self.subTest(description=test["description"]):
+                self.assertLess(
+                    len(test["description"]),
+                    70,
+                    "Description is too long (keep it to less than 70 chars)."
+                )
         print(f"Found {count} tests.")

     def test_all_descriptions_are_unique(self):
         """
         All test cases have unique test descriptions in their tests.
         """
         for count, case in enumerate(cases(self.test_files)):
-            descriptions = set(test["description"] for test in case["tests"])
-            self.assertEqual(
-                len(descriptions),
-                len(case["tests"]),
-                f"{case!r} contains a duplicate description",
-            )
+            with self.subTest(description=case["description"]):
+                self.assertUnique(
+                    test["description"] for test in case["tests"]
+                )
         print(f"Found {count} test cases.")

     @unittest.skipIf(jsonschema is None, "Validation library not present!")
@@ -141,12 +153,14 @@ class SanityTests(unittest.TestCase):
             if Validator is not None:
                 test_files = collect(version)
                 for case in cases(test_files):
-                    try:
-                        Validator.check_schema(case["schema"])
-                    except jsonschema.SchemaError as error:
-                        self.fail(
-                            f"{case} contains an invalid schema ({error})",
-                        )
+                    with self.subTest(case=case):
+                        try:
+                            Validator.check_schema(case["schema"])
+                        except jsonschema.SchemaError:
+                            self.fail(
+                                "Found an invalid schema."
+                                "See the traceback for details on why."
+                            )
             else:
                 warnings.warn(f"No schema validator for {version.name}")

@@ -157,11 +171,12 @@ class SanityTests(unittest.TestCase):
         """
         Validator = jsonschema.validators.validator_for(TESTSUITE_SCHEMA)
         validator = Validator(TESTSUITE_SCHEMA)
-        for tests in files(self.test_files):
-            try:
-                validator.validate(tests)
-            except jsonschema.ValidationError as error:
-                self.fail(str(error))
+        for path, cases in files(self.test_files):
+            with self.subTest(path=path):
+                try:
+                    validator.validate(cases)
+                except jsonschema.ValidationError as error:
+                    self.fail(str(error))


 def main(arguments):
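
A note on the assertUnique helper introduced in the diff above: because it collects the offending elements into a set and hands that set to assertFalse, a failure names the duplicated descriptions themselves, instead of only signalling a count mismatch the way the previous assertEqual-on-lengths check did. A small standalone sketch with made-up case data (again, not part of the commit):

    import unittest

    class Demo(unittest.TestCase):
        def assertUnique(self, iterable):
            # Same approach as the helper added in this commit: remember what
            # has been seen and collect anything that appears more than once.
            seen, duplicated = set(), set()
            for each in iterable:
                if each in seen:
                    duplicated.add(each)
                seen.add(each)
            self.assertFalse(duplicated, "Elements are not unique.")

        def test_case_descriptions_are_unique(self):
            case = {
                "description": "an example case",
                "tests": [
                    {"description": "valid"},
                    {"description": "invalid"},
                    {"description": "valid"},  # deliberate duplicate
                ],
            }
            with self.subTest(description=case["description"]):
                self.assertUnique(test["description"] for test in case["tests"])

    if __name__ == "__main__":
        unittest.main()

The failure message comes out along the lines of "{'valid'} is not false : Elements are not unique.", so the duplicated description is visible directly in the log.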
