Commit 2c5cc31

Py runtime: Move to relative imports (#4705)
Signed-off-by: Phil Elson <[email protected]>
1 parent c251d2d commit 2c5cc31


45 files changed: +234 -232 lines
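
Every hunk in this commit applies the same pattern: an import that reaches a sibling module through the top-level package name becomes a relative import resolved against the importing module's own package. A minimal sketch of the before/after shape (illustrative, not copied from any one file; antlr4/SomeModule.py is a placeholder name):

    # Inside a module that lives in the antlr4 package, e.g. antlr4/SomeModule.py

    # Before: absolute import, hard-wired to the installed top-level package name
    from antlr4.Token import Token

    # After: relative import, resolved against this module's own package
    from .Token import Token

Because the relative form is resolved via the module's __package__, the runtime sources keep importing each other correctly even if the package is vendored or embedded under a different top-level name.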

runtime/Python3/src/antlr4/BufferedTokenStream.py

Lines changed: 5 additions & 5 deletions
@@ -14,8 +14,8 @@
 # {@link Token#HIDDEN_CHANNEL}, use a filtering token stream such a
 # {@link CommonTokenStream}.</p>
 from io import StringIO
-from antlr4.Token import Token
-from antlr4.error.Errors import IllegalStateException
+from .Token import Token
+from .error.Errors import IllegalStateException
 
 # need forward declaration
 Lexer = None
@@ -230,7 +230,7 @@ def getHiddenTokensToRight(self, tokenIndex:int, channel:int=-1):
         self.lazyInit()
         if tokenIndex<0 or tokenIndex>=len(self.tokens):
             raise Exception(str(tokenIndex) + " not in 0.." + str(len(self.tokens)-1))
-        from antlr4.Lexer import Lexer
+        from .Lexer import Lexer
         nextOnChannel = self.nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL)
         from_ = tokenIndex+1
         # if none onchannel to right, nextOnChannel=-1 so set to = last token
@@ -245,7 +245,7 @@ def getHiddenTokensToLeft(self, tokenIndex:int, channel:int=-1):
         self.lazyInit()
         if tokenIndex<0 or tokenIndex>=len(self.tokens):
             raise Exception(str(tokenIndex) + " not in 0.." + str(len(self.tokens)-1))
-        from antlr4.Lexer import Lexer
+        from .Lexer import Lexer
         prevOnChannel = self.previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL)
         if prevOnChannel == tokenIndex - 1:
             return None
@@ -260,7 +260,7 @@ def filterForChannel(self, left:int, right:int, channel:int):
         for i in range(left, right+1):
             t = self.tokens[i]
             if channel==-1:
-                from antlr4.Lexer import Lexer
+                from .Lexer import Lexer
                 if t.channel!= Lexer.DEFAULT_TOKEN_CHANNEL:
                     hidden.append(t)
             elif t.channel==channel:
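
Three of the hunks above change imports that sit inside method bodies (getHiddenTokensToRight, getHiddenTokensToLeft, filterForChannel) rather than at module top level. The runtime defers loading Lexer until those methods run, presumably to avoid an import cycle at module load time; this commit only changes the spelling of the deferred import to the relative form. A minimal sketch of the same deferral pattern under that assumption (the helper below is illustrative, not part of the runtime):

    # Hypothetical helper inside the antlr4 package
    def is_hidden(token):
        # Deferred, relative import: .Lexer is only loaded the first time
        # this function runs, not when the enclosing module is imported.
        from .Lexer import Lexer
        return token.channel != Lexer.DEFAULT_TOKEN_CHANNEL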

runtime/Python3/src/antlr4/CommonTokenFactory.py

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
 # This default implementation of {@link TokenFactory} creates
 # {@link CommonToken} objects.
 #
-from antlr4.Token import CommonToken
+from .Token import CommonToken
 
 class TokenFactory(object):
 

runtime/Python3/src/antlr4/CommonTokenStream.py

Lines changed: 3 additions & 3 deletions
@@ -29,9 +29,9 @@
 # channel.</p>
 #/
 
-from antlr4.BufferedTokenStream import BufferedTokenStream
-from antlr4.Lexer import Lexer
-from antlr4.Token import Token
+from .BufferedTokenStream import BufferedTokenStream
+from .Lexer import Lexer
+from .Token import Token
 
 
 class CommonTokenStream(BufferedTokenStream):

runtime/Python3/src/antlr4/FileStream.py

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@
 #
 
 import codecs
-from antlr4.InputStream import InputStream
+from .InputStream import InputStream
 
 
 class FileStream(InputStream):

runtime/Python3/src/antlr4/InputStream.py

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
 #
 # Vacuum all input from a string and then treat it like a buffer.
 #
-from antlr4.Token import Token
+from .Token import Token
 
 
 class InputStream (object):

runtime/Python3/src/antlr4/IntervalSet.py

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 #
 
 from io import StringIO
-from antlr4.Token import Token
+from .Token import Token
 
 # need forward declarations
 IntervalSet = None

runtime/Python3/src/antlr4/LL1Analyzer.py

Lines changed: 8 additions & 8 deletions
@@ -3,14 +3,14 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 #/
-from antlr4.IntervalSet import IntervalSet
-from antlr4.Token import Token
-from antlr4.PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
-from antlr4.RuleContext import RuleContext
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfig import ATNConfig
-from antlr4.atn.ATNState import ATNState, RuleStopState
-from antlr4.atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
+from .IntervalSet import IntervalSet
+from .Token import Token
+from .PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
+from .RuleContext import RuleContext
+from .atn.ATN import ATN
+from .atn.ATNConfig import ATNConfig
+from .atn.ATNState import ATNState, RuleStopState
+from .atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
 
 
 class LL1Analyzer (object):

runtime/Python3/src/antlr4/Lexer.py

Lines changed: 6 additions & 6 deletions
@@ -15,12 +15,12 @@
     from typing import TextIO
 else:
     from typing.io import TextIO
-from antlr4.CommonTokenFactory import CommonTokenFactory
-from antlr4.atn.LexerATNSimulator import LexerATNSimulator
-from antlr4.InputStream import InputStream
-from antlr4.Recognizer import Recognizer
-from antlr4.Token import Token
-from antlr4.error.Errors import IllegalStateException, LexerNoViableAltException, RecognitionException
+from .CommonTokenFactory import CommonTokenFactory
+from .atn.LexerATNSimulator import LexerATNSimulator
+from .InputStream import InputStream
+from .Recognizer import Recognizer
+from .Token import Token
+from .error.Errors import IllegalStateException, LexerNoViableAltException, RecognitionException
 
 class TokenSource(object):
 
runtime/Python3/src/antlr4/ListTokenSource.py

Lines changed: 3 additions & 3 deletions
@@ -12,9 +12,9 @@
 # as the EOF token for every call to {@link #nextToken} after the end of the
 # list is reached. Otherwise, an EOF token will be created.</p>
 #
-from antlr4.CommonTokenFactory import CommonTokenFactory
-from antlr4.Lexer import TokenSource
-from antlr4.Token import Token
+from .CommonTokenFactory import CommonTokenFactory
+from .Lexer import TokenSource
+from .Token import Token
 
 
 class ListTokenSource(TokenSource):

runtime/Python3/src/antlr4/Parser.py

Lines changed: 14 additions & 14 deletions
@@ -7,20 +7,20 @@
     from typing import TextIO
 else:
     from typing.io import TextIO
-from antlr4.BufferedTokenStream import TokenStream
-from antlr4.CommonTokenFactory import TokenFactory
-from antlr4.error.ErrorStrategy import DefaultErrorStrategy
-from antlr4.InputStream import InputStream
-from antlr4.Recognizer import Recognizer
-from antlr4.RuleContext import RuleContext
-from antlr4.ParserRuleContext import ParserRuleContext
-from antlr4.Token import Token
-from antlr4.Lexer import Lexer
-from antlr4.atn.ATNDeserializer import ATNDeserializer
-from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions
-from antlr4.error.Errors import UnsupportedOperationException, RecognitionException
-from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher
-from antlr4.tree.Tree import ParseTreeListener, TerminalNode, ErrorNode
+from .BufferedTokenStream import TokenStream
+from .CommonTokenFactory import TokenFactory
+from .error.ErrorStrategy import DefaultErrorStrategy
+from .InputStream import InputStream
+from .Recognizer import Recognizer
+from .RuleContext import RuleContext
+from .ParserRuleContext import ParserRuleContext
+from .Token import Token
+from .Lexer import Lexer
+from .atn.ATNDeserializer import ATNDeserializer
+from .atn.ATNDeserializationOptions import ATNDeserializationOptions
+from .error.Errors import UnsupportedOperationException, RecognitionException
+from .tree.ParseTreePatternMatcher import ParseTreePatternMatcher
+from .tree.Tree import ParseTreeListener, TerminalNode, ErrorNode
 
 class TraceListener(ParseTreeListener):
     __slots__ = '_parser'