Skip to content

Commit 42ce358

Browse files
committed
Fix some tests
1 parent 9060d92 commit 42ce358

File tree

2 files changed: +16 additions, −16 deletions

ipython_pygments_lexers.py

Lines changed: 6 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -115,10 +115,6 @@
115115
r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
116116
bygroups(Text, Operator, Text, using(Python3Lexer)),
117117
),
118-
(
119-
r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
120-
bygroups(Text, Operator, Text, using(Python3Lexer)),
121-
),
122118
(
123119
r"(?s)(\s*)(%%python2)([^\n]*\n)(.*)",
124120
bygroups(Text, Operator, Text, using(Python2Lexer)),
@@ -127,16 +123,20 @@
127123
r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
128124
bygroups(Text, Operator, Text, using(Python3Lexer)),
129125
),
126+
(
127+
r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
128+
bygroups(Text, Operator, Text, using(Python3Lexer)),
129+
),
130130
(
131131
r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
132132
bygroups(Text, Operator, Text, using(RubyLexer)),
133133
),
134134
(
135-
r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
135+
r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
136136
bygroups(Text, Operator, Text, using(Python3Lexer)),
137137
),
138138
(
139-
r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
139+
r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
140140
bygroups(Text, Operator, Text, using(Python3Lexer)),
141141
),
142142
(

test_ipython_pygments_lexers.py

Lines changed: 10 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -12,6 +12,8 @@
1212

1313
pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
1414

15+
TOKEN_WS = Token.Text.Whitespace if pyg214 else Token.Text
16+
1517

1618
class TestLexers(TestCase):
1719
"""Collection of lexers tests"""
@@ -71,10 +73,8 @@ def testIPythonLexer(self):
7173
(Token.Text, " "),
7274
(Token.Operator, "%"),
7375
(Token.Keyword, "sx"),
74-
(Token.Text, " "),
76+
(TOKEN_WS, " "),
7577
] + bash_tokens[1:]
76-
if tokens_2[7] == (Token.Text, " ") and pyg214: # pygments 2.14+
77-
tokens_2[7] = (Token.Text.Whitespace, " ")
7878
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
7979

8080
fragment_2 = "f = %R function () {}\n"
@@ -147,26 +147,26 @@ def testIPythonLexer(self):
147147
(Token.Text, " "),
148148
(Token.Name, "b"),
149149
(Token.Punctuation, ":"),
150-
(Token.Text, "\n"),
150+
(TOKEN_WS, "\n"),
151151
(Token.Text, " "),
152152
(Token.Keyword, "pass"),
153-
(Token.Text, "\n"),
153+
(TOKEN_WS, "\n"),
154154
]
155-
if tokens[10] == (Token.Text, "\n") and pyg214: # pygments 2.14+
156-
tokens[10] = (Token.Text.Whitespace, "\n")
157-
assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
155+
assert tokens == list(self.lexer.get_tokens(fragment))
158156

159157
fragment = "%%timeit\nmath.sin(0)"
160158
tokens = [
161-
(Token.Operator, "%%timeit\n"),
159+
(Token.Operator, "%%timeit"),
160+
(Token.Text, "\n"),
162161
(Token.Name, "math"),
163162
(Token.Operator, "."),
164163
(Token.Name, "sin"),
165164
(Token.Punctuation, "("),
166165
(Token.Literal.Number.Integer, "0"),
167166
(Token.Punctuation, ")"),
168-
(Token.Text, "\n"),
167+
(TOKEN_WS, "\n"),
169168
]
169+
assert tokens == list(self.lexer.get_tokens(fragment))
170170

171171
fragment = "%%HTML\n<div>foo</div>"
172172
tokens = [

0 commit comments

Comments
 (0)