
line_re = re.compile('.*?\n')

- __all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
-            'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
-            'IPythonConsoleLexer', 'IPyLexer']
-
-
- def build_ipy_lexer(python3):
-     """Builds IPython lexers depending on the value of `python3`.
-
-     The lexer inherits from an appropriate Python lexer and then adds
-     information about IPython specific keywords (i.e. magic commands,
-     shell commands, etc.)
-
-     Parameters
-     ----------
-     python3 : bool
-         If `True`, then build an IPython lexer from a Python 3 lexer.
-
-     """
-     # It would be nice to have a single IPython lexer class which takes
-     # a boolean `python3`. But since there are two Python lexer classes,
-     # we will also have two IPython lexer classes.
-     if python3:
-         PyLexer = Python3Lexer
-         name = 'IPython3'
-         aliases = ['ipython3']
-         doc = """IPython3 Lexer"""
-     else:
-         PyLexer = PythonLexer
-         name = 'IPython'
-         aliases = ['ipython2', 'ipython']
-         doc = """IPython Lexer"""
-
-     ipython_tokens = [
-         (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
-         (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
-         (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
-         (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
-         (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
-         (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
-         (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
-         (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
-         (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-         (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
-         (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
-         (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
-         (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
-         (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
-                                                 using(BashLexer), Text)),
-         (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
-         (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
-         (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
-         (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
-         (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
-     ]
-
-     tokens = PyLexer.tokens.copy()
+ __all__ = [
+     'IPython3Lexer',
+     'IPythonLexer',
+     'IPythonPartialTracebackLexer',
+     'IPythonTracebackLexer',
+     'IPythonConsoleLexer',
+     'IPyLexer'
+ ]
+
+
+ ipython_tokens = [
+     (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
+     (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
+     (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
+     (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
+     (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
+     (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
+     (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
+     (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+     (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+     (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
+     (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
+     (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
+     (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
+                                             using(BashLexer), Text)),
+     (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
+     (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+     (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+     (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
+     (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
+ ]
+
+
+ class IPython3Lexer(Python3Lexer):
+     """IPython code lexer (based on Python 3)"""
+     name = "IPython"
+     aliases = ["ipython", "ipython3"]
+
+     tokens = Python3Lexer.tokens.copy()
    tokens['root'] = ipython_tokens + tokens['root']

-     attrs = {'name': name, 'aliases': aliases, 'filenames': [],
-              '__doc__': doc, 'tokens': tokens}
-
-     return type(name, (PyLexer,), attrs)

-
- IPython3Lexer = build_ipy_lexer(python3=True)
- IPythonLexer = build_ipy_lexer(python3=False)
+ IPythonLexer = IPython3Lexer


class IPythonPartialTracebackLexer(RegexLexer):
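As a quick illustration of what the restructuring above preserves, here is a minimal usage sketch (not part of the diff; it assumes the module path IPython.lib.lexers and a standard Pygments install, and the sample source string is made up):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    # IPython3Lexer is now an ordinary Python3Lexer subclass rather than a
    # class built at import time by build_ipy_lexer().
    from IPython.lib.lexers import IPython3Lexer, IPythonLexer

    code = "%%timeit\nsum(range(10))\n"   # cell magic followed by plain Python
    print(highlight(code, IPython3Lexer(), TerminalFormatter()))

    # The Python 2 variant is gone; the old name is kept as an alias of the
    # same class, per the "IPythonLexer = IPython3Lexer" line above.
    assert IPythonLexer is IPython3Lexer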
@@ -192,7 +170,7 @@ class IPythonTracebackLexer(DelegatingLexer):
    # lexer.
    #
    name = 'IPython Traceback'
-     aliases = ['ipythontb']
+     aliases = ['ipythontb', 'ipython3tb']

    def __init__(self, **options):
        """
@@ -202,18 +180,7 @@ def __init__(self, **options):
        # note we need a __init__ doc, as otherwise it inherits the doc from the super class
        # which will fail the documentation build as it references section of the pygments docs that
        # do not exists when building IPython's docs.
-         self.python3 = get_bool_opt(options, 'python3', False)
-         if self.python3:
-             self.aliases = ['ipython3tb']
-         else:
-             self.aliases = ['ipython2tb', 'ipythontb']
-
-         if self.python3:
-             IPyLexer = IPython3Lexer
-         else:
-             IPyLexer = IPythonLexer
-
-         DelegatingLexer.__init__(self, IPyLexer,
+         DelegatingLexer.__init__(self, IPython3Lexer,
                                 IPythonPartialTracebackLexer, **options)

class IPythonConsoleLexer(Lexer):
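A short sketch of the simplified traceback lexer (illustrative only, not part of the diff; it assumes IPython.lib.lexers as the module path): the constructor now always delegates to IPython3Lexer, and both alias spellings live on the class instead of being chosen inside __init__:

    from IPython.lib.lexers import IPythonTracebackLexer

    # No python3=... option any more; construction is unconditional.
    lexer = IPythonTracebackLexer()
    print(lexer.aliases)        # ['ipythontb', 'ipython3tb']

    # Tokenize a (heavily snipped) traceback line as a docs build would.
    tb = "ZeroDivisionError: division by zero\n"
    for token, value in lexer.get_tokens(tb):
        print(token, repr(value))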
@@ -246,7 +213,7 @@ class IPythonConsoleLexer(Lexer):

    """
    name = 'IPython console session'
-     aliases = ['ipythonconsole']
+     aliases = ['ipythonconsole', 'ipython3console']
    mimetypes = ['text/x-ipython-console']

    # The regexps used to determine what is input and what is output.
@@ -271,9 +238,6 @@ def __init__(self, **options):

        Parameters
        ----------
-         python3 : bool
-             If `True`, then the console inputs are parsed using a Python 3
-             lexer. Otherwise, they are parsed using a Python 2 lexer.
        in1_regex : RegexObject
            The compiled regular expression used to detect the start
            of inputs. Although the IPython configuration setting may have a
@@ -289,12 +253,6 @@ def __init__(self, **options):
            then the default output prompt is assumed.

        """
-         self.python3 = get_bool_opt(options, 'python3', False)
-         if self.python3:
-             self.aliases = ['ipython3console']
-         else:
-             self.aliases = ['ipython2console', 'ipythonconsole']
-
        in1_regex = options.get('in1_regex', self.in1_regex)
        in2_regex = options.get('in2_regex', self.in2_regex)
        out_regex = options.get('out_regex', self.out_regex)
@@ -319,15 +277,8 @@ def __init__(self, **options):

        Lexer.__init__(self, **options)

-         if self.python3:
-             pylexer = IPython3Lexer
-             tblexer = IPythonTracebackLexer
-         else:
-             pylexer = IPythonLexer
-             tblexer = IPythonTracebackLexer
-
-         self.pylexer = pylexer(**options)
-         self.tblexer = tblexer(**options)
+         self.pylexer = IPython3Lexer(**options)
+         self.tblexer = IPythonTracebackLexer(**options)

        self.reset()

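The console-lexer changes follow the same pattern, so one hedged usage sketch (again assuming the IPython.lib.lexers module path; the session text is made up) is enough to show that a session highlights as before, just without the python3 switch:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from IPython.lib.lexers import IPythonConsoleLexer

    # Inputs are always lexed with IPython3Lexer and tracebacks with
    # IPythonTracebackLexer; the in1/in2/out prompt regexes are unchanged.
    session = (
        "In [1]: x = 2 ** 10\n"
        "\n"
        "In [2]: x\n"
        "Out[2]: 1024\n"
    )
    print(highlight(session, IPythonConsoleLexer(), HtmlFormatter()))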
@@ -508,7 +459,7 @@ class IPyLexer(Lexer):

    """
    name = 'IPy session'
-     aliases = ['ipy']
+     aliases = ['ipy', 'ipy3']

    def __init__(self, **options):
        """
@@ -518,12 +469,6 @@ def __init__(self, **options):
        """
        # init docstring is necessary for docs not to fail to build do to parent
        # docs referenceing a section in pygments docs.
-         self.python3 = get_bool_opt(options, 'python3', False)
-         if self.python3:
-             self.aliases = ['ipy3']
-         else:
-             self.aliases = ['ipy2', 'ipy']
-
        Lexer.__init__(self, **options)

        self.IPythonLexer = IPythonLexer(**options)
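Finally, a sketch for IPyLexer (illustrative only; it assumes the IPython.lib.lexers module path): the wrapper still dispatches to the console lexer when it finds an In [N]: input prompt and to the plain IPython lexer otherwise, and its alias list now carries both names on the class:

    from IPython.lib.lexers import IPyLexer

    lexer = IPyLexer()
    print(lexer.aliases)            # ['ipy', 'ipy3']

    mixed = "In [1]: print('hi')\nhi\n"
    # Streams tokens through the console lexer because of the In [1]: prompt.
    for token, value in lexer.get_tokens(mixed):
        print(token, repr(value))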