Commit d497f9b

Add some basic tests for the lexer
1 parent 1c300be commit d497f9b

Lines changed: 143 additions & 0 deletions
@@ -1,2 +1,145 @@
+"""
+JSON Schema Lexer tests
+"""
+
+import pytest
+
+from pygments.token import Token
+from jsonschema_lexer.lexer import JSONSchemaLexer
+
+# Test helpers.
+
+@pytest.fixture
+def lexer():
+    yield JSONSchemaLexer()
+
+
+@pytest.fixture
+def data_types():
+    yield JSONSchemaLexer().parsed_data_types
+
+
+@pytest.fixture
+def keywords():
+    yield JSONSchemaLexer().parsed_keywords
+
+
+def assert_single_token(lexer, s, token):
+    """Show that a given string generates only one token."""
+    tokens = list(lexer.get_tokens_unprocessed(s))
+    assert len(tokens) == 1 and s == tokens[0][2] and token == tokens[0][1]
+
+
+def assert_tokens(lexer, string, expected_tokens):
+    """Show that a given string generates the expected tokens."""
+    tokens = list(lexer.get_tokens_unprocessed(string))
+    parsed_tokens = [t[1] for t in tokens]
+    assert parsed_tokens == expected_tokens
+
+
+# Tests
+
 def test_it_imports():
     import jsonschema_lexer  # noqa: F401
+
+
+def test_data_type_tokens(lexer, data_types):
+    for data_type in data_types:
+        assert_single_token(lexer, data_type, Token.Name.Decorator)
+
+
+def test_keyword_tokens(lexer, keywords):
+    for keyword in keywords:
+        sample_json_schema = f"""
+        {{
+            {keyword}:"test"
+        }}
+        """.strip()
+        assert_tokens(
+            lexer,
+            sample_json_schema,
+            [
+                Token.Punctuation,
+                Token.Text.Whitespace,
+                Token.Keyword,
+                Token.Punctuation,
+                Token.Literal.String.Double,
+                Token.Text.Whitespace,
+                Token.Punctuation,
+            ],
+        )
+
+
+def test_nested_json_schema(lexer):
+    test_json_schema = """
+    {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "title": "Product",
+        "description": "A product from Acme's catalog",
+        "type": "object",
+        "properties": {
+            "productId": {
+                "description": "The unique identifier for a product",
+                "type": "integer"
+            }
+        }
+    }
+    """.strip()
+    assert_tokens(
+        lexer,
+        test_json_schema,
+        [
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Literal.String.Double,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Literal.String.Double,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Literal.String.Double,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Name.Decorator,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Name.Tag,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Literal.String.Double,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Keyword,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Name.Decorator,
+            Token.Text.Whitespace,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Punctuation,
+            Token.Text.Whitespace,
+            Token.Punctuation,
+        ],
+    )
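For context on the helper assertions: Pygments' get_tokens_unprocessed() yields (index, tokentype, value) tuples, which is why assert_single_token reads the token type from tokens[0][1] and the matched text from tokens[0][2], and why assert_tokens compares only t[1]. Comparing types alone keeps the nested-schema test independent of exact offsets and whitespace. A minimal sketch, not part of the commit, with an illustrative input string:

    # Shows the (index, tokentype, value) tuple shape the helpers index into.
    from jsonschema_lexer.lexer import JSONSchemaLexer

    lexer = JSONSchemaLexer()
    for index, tokentype, value in lexer.get_tokens_unprocessed('{"type": "object"}'):
        print(index, tokentype, repr(value))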

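A worked reading of the expected list in test_keyword_tokens, using a hypothetical keyword '"title"' (the actual entries come from the lexer's parsed_keywords); the pairing below is inferred from the test's expected list, not from the lexer's documentation:

    # Rendered sample for keyword '"title"', after .strip():
    #
    #   {
    #       "title":"test"
    #   }
    #
    # '{'        -> Token.Punctuation
    # '\n    '   -> Token.Text.Whitespace   (newline plus indentation)
    # '"title"'  -> Token.Keyword
    # ':'        -> Token.Punctuation
    # '"test"'   -> Token.Literal.String.Double
    # '\n'       -> Token.Text.Whitespace
    # '}'        -> Token.Punctuation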