
Commit 2ea1d47

Rearrange tests in pytest style
1 parent 42ce358 commit 2ea1d47


test_ipython_pygments_lexers.py

Lines changed: 175 additions & 169 deletions
@@ -3,7 +3,6 @@
 # Copyright (c) IPython Development Team.
 # Distributed under the terms of the Modified BSD License.
 
-from unittest import TestCase
 from pygments import __version__ as pygments_version
 from pygments.token import Token
 from pygments.lexers import BashLexer
@@ -15,171 +14,178 @@
 TOKEN_WS = Token.Text.Whitespace if pyg214 else Token.Text
 
 
-class TestLexers(TestCase):
-    """Collection of lexers tests"""
-
-    def setUp(self):
-        self.lexer = lexers.IPythonLexer()
-        self.bash_lexer = BashLexer()
-
-    def testIPythonLexer(self):
-        fragment = "!echo $HOME\n"
-        bash_tokens = [
-            (Token.Operator, "!"),
-        ]
-        bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
-        ipylex_token = list(self.lexer.get_tokens(fragment))
-        assert bash_tokens[:-1] == ipylex_token[:-1]
-
-        fragment_2 = "!" + fragment
-        tokens_2 = [
-            (Token.Operator, "!!"),
-        ] + bash_tokens[1:]
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment_2 = "\t %%!\n" + fragment[1:]
-        tokens_2 = [
-            (Token.Text, "\t "),
-            (Token.Operator, "%%!"),
-            (Token.Text, "\n"),
-        ] + bash_tokens[1:]
-        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
-
-        fragment_2 = "x = " + fragment
-        tokens_2 = [
-            (Token.Name, "x"),
-            (Token.Text, " "),
-            (Token.Operator, "="),
-            (Token.Text, " "),
-        ] + bash_tokens
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment_2 = "x, = " + fragment
-        tokens_2 = [
-            (Token.Name, "x"),
-            (Token.Punctuation, ","),
-            (Token.Text, " "),
-            (Token.Operator, "="),
-            (Token.Text, " "),
-        ] + bash_tokens
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment_2 = "x, = %sx " + fragment[1:]
-        tokens_2 = [
-            (Token.Name, "x"),
-            (Token.Punctuation, ","),
-            (Token.Text, " "),
-            (Token.Operator, "="),
-            (Token.Text, " "),
-            (Token.Operator, "%"),
-            (Token.Keyword, "sx"),
-            (TOKEN_WS, " "),
-        ] + bash_tokens[1:]
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment_2 = "f = %R function () {}\n"
-        tokens_2 = [
-            (Token.Name, "f"),
-            (Token.Text, " "),
-            (Token.Operator, "="),
-            (Token.Text, " "),
-            (Token.Operator, "%"),
-            (Token.Keyword, "R"),
-            (Token.Text, " function () {}\n"),
-        ]
-        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
-
-        fragment_2 = "\t%%xyz\n$foo\n"
-        tokens_2 = [
-            (Token.Text, "\t"),
-            (Token.Operator, "%%"),
-            (Token.Keyword, "xyz"),
-            (Token.Text, "\n$foo\n"),
-        ]
-        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
-
-        fragment_2 = "%system?\n"
-        tokens_2 = [
-            (Token.Operator, "%"),
-            (Token.Keyword, "system"),
-            (Token.Operator, "?"),
-            (Token.Text, "\n"),
-        ]
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment_2 = "x != y\n"
-        tokens_2 = [
-            (Token.Name, "x"),
-            (Token.Text, " "),
-            (Token.Operator, "!="),
-            (Token.Text, " "),
-            (Token.Name, "y"),
-            (Token.Text, "\n"),
-        ]
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment_2 = " ?math.sin\n"
-        tokens_2 = [
-            (Token.Text, " "),
-            (Token.Operator, "?"),
-            (Token.Text, "math.sin"),
-            (Token.Text, "\n"),
-        ]
-        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
-
-        fragment = " *int*?\n"
-        tokens = [
-            (Token.Text, " *int*"),
-            (Token.Operator, "?"),
-            (Token.Text, "\n"),
-        ]
-        assert tokens == list(self.lexer.get_tokens(fragment))
-
-        fragment = "%%writefile -a foo.py\nif a == b:\n pass"
-        tokens = [
-            (Token.Operator, "%%writefile"),
-            (Token.Text, " -a foo.py\n"),
-            (Token.Keyword, "if"),
-            (Token.Text, " "),
-            (Token.Name, "a"),
-            (Token.Text, " "),
-            (Token.Operator, "=="),
-            (Token.Text, " "),
-            (Token.Name, "b"),
-            (Token.Punctuation, ":"),
-            (TOKEN_WS, "\n"),
-            (Token.Text, " "),
-            (Token.Keyword, "pass"),
-            (TOKEN_WS, "\n"),
-        ]
-        assert tokens == list(self.lexer.get_tokens(fragment))
-
-        fragment = "%%timeit\nmath.sin(0)"
-        tokens = [
-            (Token.Operator, "%%timeit"),
-            (Token.Text, "\n"),
-            (Token.Name, "math"),
-            (Token.Operator, "."),
-            (Token.Name, "sin"),
-            (Token.Punctuation, "("),
-            (Token.Literal.Number.Integer, "0"),
-            (Token.Punctuation, ")"),
-            (TOKEN_WS, "\n"),
-        ]
-        assert tokens == list(self.lexer.get_tokens(fragment))
-
-        fragment = "%%HTML\n<div>foo</div>"
-        tokens = [
-            (Token.Operator, "%%HTML"),
-            (Token.Text, "\n"),
-            (Token.Punctuation, "<"),
-            (Token.Name.Tag, "div"),
-            (Token.Punctuation, ">"),
-            (Token.Text, "foo"),
-            (Token.Punctuation, "<"),
-            (Token.Punctuation, "/"),
-            (Token.Name.Tag, "div"),
-            (Token.Punctuation, ">"),
-            (Token.Text, "\n"),
-        ]
-        assert tokens == list(self.lexer.get_tokens(fragment))
+def test_plain_python():
+    lexer = lexers.IPythonLexer()
+    fragment_2 = "x != y\n"
+    tokens_2 = [
+        (Token.Name, "x"),
+        (Token.Text, " "),
+        (Token.Operator, "!="),
+        (Token.Text, " "),
+        (Token.Name, "y"),
+        (Token.Text, "\n"),
+    ]
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+
+def test_shell_commands():
+    lexer = lexers.IPythonLexer()
+    bash_lexer = BashLexer()
+    fragment = "!echo $HOME\n"
+    bash_tokens = [
+        (Token.Operator, "!"),
+    ]
+    bash_tokens.extend(bash_lexer.get_tokens(fragment[1:]))
+    ipylex_token = list(lexer.get_tokens(fragment))
+    assert bash_tokens[:-1] == ipylex_token[:-1]
+
+    fragment_2 = "!" + fragment
+    tokens_2 = [
+        (Token.Operator, "!!"),
+    ] + bash_tokens[1:]
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+    fragment_2 = "\t %%!\n" + fragment[1:]
+    tokens_2 = [
+        (Token.Text, "\t "),
+        (Token.Operator, "%%!"),
+        (Token.Text, "\n"),
+    ] + bash_tokens[1:]
+    assert tokens_2 == list(lexer.get_tokens(fragment_2))
+
+    fragment_2 = "x = " + fragment
+    tokens_2 = [
+        (Token.Name, "x"),
+        (Token.Text, " "),
+        (Token.Operator, "="),
+        (Token.Text, " "),
+    ] + bash_tokens
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+    fragment_2 = "x, = " + fragment
+    tokens_2 = [
+        (Token.Name, "x"),
+        (Token.Punctuation, ","),
+        (Token.Text, " "),
+        (Token.Operator, "="),
+        (Token.Text, " "),
+    ] + bash_tokens
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+    fragment_2 = "x, = %sx " + fragment[1:]
+    tokens_2 = [
+        (Token.Name, "x"),
+        (Token.Punctuation, ","),
+        (Token.Text, " "),
+        (Token.Operator, "="),
+        (Token.Text, " "),
+        (Token.Operator, "%"),
+        (Token.Keyword, "sx"),
+        (TOKEN_WS, " "),
+    ] + bash_tokens[1:]
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+
+def test_magics():
+    lexer = lexers.IPythonLexer()
+    fragment_2 = "f = %R function () {}\n"
+    tokens_2 = [
+        (Token.Name, "f"),
+        (Token.Text, " "),
+        (Token.Operator, "="),
+        (Token.Text, " "),
+        (Token.Operator, "%"),
+        (Token.Keyword, "R"),
+        (Token.Text, " function () {}\n"),
+    ]
+    assert tokens_2 == list(lexer.get_tokens(fragment_2))
+
+    fragment_2 = "%system?\n"
+    tokens_2 = [
+        (Token.Operator, "%"),
+        (Token.Keyword, "system"),
+        (Token.Operator, "?"),
+        (Token.Text, "\n"),
+    ]
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+
+def test_help():
+    lexer = lexers.IPythonLexer()
+    fragment_2 = " ?math.sin\n"
+    tokens_2 = [
+        (Token.Text, " "),
+        (Token.Operator, "?"),
+        (Token.Text, "math.sin"),
+        (Token.Text, "\n"),
+    ]
+    assert tokens_2[:-1] == list(lexer.get_tokens(fragment_2))[:-1]
+
+    fragment = " *int*?\n"
+    tokens = [
+        (Token.Text, " *int*"),
+        (Token.Operator, "?"),
+        (Token.Text, "\n"),
+    ]
+    assert tokens == list(lexer.get_tokens(fragment))
+
+
+def test_cell_magics():
+    lexer = lexers.IPythonLexer()
+    fragment = "%%writefile -a foo.py\nif a == b:\n pass"
+    tokens = [
+        (Token.Operator, "%%writefile"),
+        (Token.Text, " -a foo.py\n"),
+        (Token.Keyword, "if"),
+        (Token.Text, " "),
+        (Token.Name, "a"),
+        (Token.Text, " "),
+        (Token.Operator, "=="),
+        (Token.Text, " "),
+        (Token.Name, "b"),
+        (Token.Punctuation, ":"),
+        (TOKEN_WS, "\n"),
+        (Token.Text, " "),
+        (Token.Keyword, "pass"),
+        (TOKEN_WS, "\n"),
+    ]
+    assert tokens == list(lexer.get_tokens(fragment))
+
+    fragment = "%%timeit\nmath.sin(0)"
+    tokens = [
+        (Token.Operator, "%%timeit"),
+        (Token.Text, "\n"),
+        (Token.Name, "math"),
+        (Token.Operator, "."),
+        (Token.Name, "sin"),
+        (Token.Punctuation, "("),
+        (Token.Literal.Number.Integer, "0"),
+        (Token.Punctuation, ")"),
+        (TOKEN_WS, "\n"),
+    ]
+    assert tokens == list(lexer.get_tokens(fragment))
+
+    fragment = "%%HTML\n<div>foo</div>"
+    tokens = [
+        (Token.Operator, "%%HTML"),
+        (Token.Text, "\n"),
+        (Token.Punctuation, "<"),
+        (Token.Name.Tag, "div"),
+        (Token.Punctuation, ">"),
+        (Token.Text, "foo"),
+        (Token.Punctuation, "<"),
+        (Token.Punctuation, "/"),
+        (Token.Name.Tag, "div"),
+        (Token.Punctuation, ">"),
+        (Token.Text, "\n"),
+    ]
+    assert tokens == list(lexer.get_tokens(fragment))
+
+    fragment_2 = "\t%%xyz\n$foo\n"
+    tokens_2 = [
+        (Token.Text, "\t"),
+        (Token.Operator, "%%"),
+        (Token.Keyword, "xyz"),
+        (Token.Text, "\n$foo\n"),
+    ]
+    assert tokens_2 == list(lexer.get_tokens(fragment_2))

0 commit comments
