Commit 09da7a4

[antlr4-python3-runtime] Fixed multiple Incomplete (#15038)
1 parent 58da238 commit 09da7a4

13 files changed, +282 -227 lines changed

stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi

Lines changed: 14 additions & 15 deletions

@@ -1,19 +1,18 @@
-from _typeshed import Incomplete
-
 from antlr4.error.Errors import IllegalStateException as IllegalStateException
+from antlr4.Lexer import Lexer as ActualLexer, TokenSource
 from antlr4.Token import Token as Token
 
-Lexer: Incomplete
+Lexer: None
 
 class TokenStream: ...
 
 class BufferedTokenStream(TokenStream):
     __slots__ = ("tokenSource", "tokens", "index", "fetchedEOF")
-    tokenSource: Incomplete
-    tokens: Incomplete
+    tokenSource: TokenSource
+    tokens: list[Token]
     index: int
     fetchedEOF: bool
-    def __init__(self, tokenSource: Lexer) -> None: ...
+    def __init__(self, tokenSource: ActualLexer | None) -> None: ...
     def mark(self) -> int: ...
     def release(self, marker: int) -> None: ...
     def reset(self) -> None: ...
@@ -26,15 +25,15 @@ class BufferedTokenStream(TokenStream):
     def LA(self, i: int) -> int: ...
     def LB(self, k: int) -> Token | None: ...
     def LT(self, k: int) -> Token | None: ...
-    def adjustSeekIndex(self, i: int): ...
+    def adjustSeekIndex(self, i: int) -> int: ...
     def lazyInit(self) -> None: ...
     def setup(self) -> None: ...
-    def setTokenSource(self, tokenSource: Lexer): ...
-    def nextTokenOnChannel(self, i: int, channel: int): ...
-    def previousTokenOnChannel(self, i: int, channel: int): ...
-    def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1): ...
-    def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1): ...
-    def filterForChannel(self, left: int, right: int, channel: int): ...
-    def getSourceName(self): ...
-    def getText(self, start: int | None = None, stop: int | None = None): ...
+    def setTokenSource(self, tokenSource: ActualLexer | None) -> None: ...
+    def nextTokenOnChannel(self, i: int, channel: int) -> int: ...
+    def previousTokenOnChannel(self, i: int, channel: int) -> int: ...
+    def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1) -> list[Token] | None: ...
+    def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1) -> list[Token] | None: ...
+    def filterForChannel(self, left: int, right: int, channel: int) -> list[Token] | None: ...
+    def getSourceName(self) -> str: ...
+    def getText(self, start: int | None = None, stop: int | None = None) -> str: ...
     def fill(self) -> None: ...
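
With tokenSource, tokens, and the LT/LB lookahead methods now typed, a checker can flag unguarded access to a possibly missing token. A minimal sketch of the effect, assuming a hypothetical generated lexer module named ExprLexer (any ANTLR-generated lexer would do):

    from antlr4 import CommonTokenStream, InputStream
    from ExprLexer import ExprLexer  # hypothetical generated lexer

    tokens = CommonTokenStream(ExprLexer(InputStream("1 + 2")))
    tokens.fill()                  # buffer every token from the lexer
    first = tokens.LT(1)           # typed Token | None, no longer Incomplete
    if first is not None:          # narrowing that type checkers now require
        print(first.text, first.type)
    print(tokens.getText())        # -> str under the new annotations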

stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi

Lines changed: 15 additions & 7 deletions

@@ -1,15 +1,23 @@
-from _typeshed import Incomplete
-
+from antlr4.InputStream import InputStream
+from antlr4.Lexer import TokenSource
 from antlr4.Token import CommonToken as CommonToken
 
 class TokenFactory: ...
 
 class CommonTokenFactory(TokenFactory):
     __slots__ = "copyText"
-    DEFAULT: Incomplete
-    copyText: Incomplete
+    DEFAULT: CommonTokenFactory | None
+    copyText: bool
     def __init__(self, copyText: bool = False) -> None: ...
     def create(
-        self, source: tuple[Incomplete, ...], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int
-    ): ...
-    def createThin(self, type: int, text: str): ...
+        self,
+        source: tuple[TokenSource, InputStream],
+        type: int,
+        text: str,
+        channel: int,
+        start: int,
+        stop: int,
+        line: int,
+        column: int,
+    ) -> CommonToken: ...
+    def createThin(self, type: int, text: str) -> CommonToken: ...
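
The factory methods now advertise CommonToken rather than an untyped result, so attributes on the created token resolve without casts. A small sketch using createThin (the token type 1 is an arbitrary example value):

    from antlr4.CommonTokenFactory import CommonTokenFactory

    factory = CommonTokenFactory()
    tok = factory.createThin(1, "hello")  # -> CommonToken per the new stub
    print(tok.type, tok.text)             # attributes now type-check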

stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi

Lines changed: 1 addition & 3 deletions

@@ -1,12 +1,10 @@
-from _typeshed import Incomplete
-
 from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream
 from antlr4.Lexer import Lexer as Lexer
 from antlr4.Token import Token as Token
 
 class CommonTokenStream(BufferedTokenStream):
     __slots__ = "channel"
-    channel: Incomplete
+    channel: int
     def __init__(self, lexer: Lexer, channel: int = 0) -> None: ...
     def adjustSeekIndex(self, i: int) -> int: ...
     def LB(self, k: int) -> Token | None: ...

stubs/antlr4-python3-runtime/antlr4/FileStream.pyi

Lines changed: 2 additions & 4 deletions

@@ -1,9 +1,7 @@
-from _typeshed import Incomplete
-
 from antlr4.InputStream import InputStream as InputStream
 
 class FileStream(InputStream):
     __slots__ = "fileName"
-    fileName: Incomplete
+    fileName: str
     def __init__(self, fileName: str, encoding: str = "ascii", errors: str = "strict") -> None: ...
-    def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict"): ...
+    def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict") -> str: ...
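
fileName is now str and readDataFrom returns str, so code reading back the stream's metadata type-checks cleanly. A sketch, assuming an input file expr.txt exists (hypothetical path):

    from antlr4 import FileStream

    stream = FileStream("expr.txt", encoding="utf-8")  # hypothetical file
    print(stream.fileName)                      # str, no longer Incomplete
    print(stream.getText(0, stream.size - 1))   # inherited from InputStream, -> str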

stubs/antlr4-python3-runtime/antlr4/InputStream.pyi

Lines changed: 13 additions & 11 deletions

@@ -1,22 +1,24 @@
-from _typeshed import Incomplete
+from typing import Literal
 
 from antlr4.Token import Token as Token
 
 class InputStream:
     __slots__ = ("name", "strdata", "_index", "data", "_size")
     name: str
-    strdata: Incomplete
-    data: Incomplete
+    strdata: str
+    data: list[int]
+    _index: int
+    _size: int
     def __init__(self, data: str) -> None: ...
     @property
-    def index(self): ...
+    def index(self) -> int: ...
     @property
-    def size(self): ...
+    def size(self) -> int: ...
     def reset(self) -> None: ...
     def consume(self) -> None: ...
-    def LA(self, offset: int): ...
-    def LT(self, offset: int): ...
-    def mark(self): ...
-    def release(self, marker: int): ...
-    def seek(self, _index: int): ...
-    def getText(self, start: int, stop: int): ...
+    def LA(self, offset: int) -> int: ...
+    def LT(self, offset: int) -> int: ...
+    def mark(self) -> Literal[-1]: ...
+    def release(self, marker: int) -> None: ...
+    def seek(self, _index: int) -> None: ...
+    def getText(self, start: int, stop: int) -> str: ...
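
These annotations make the lookahead protocol explicit: LA returns a code point as int (Token.EOF, i.e. -1, at end of input), and mark always returns the literal -1 because an in-memory stream needs no real marks. For example:

    from antlr4 import InputStream

    stream = InputStream("ab")
    print(stream.size)        # 2 -- int via the annotated property
    print(chr(stream.LA(1)))  # 'a': LA() -> int is a code point
    stream.consume()
    print(chr(stream.LA(1)))  # 'b'
    print(stream.mark())      # always -1 (Literal[-1]): marks are no-ops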

stubs/antlr4-python3-runtime/antlr4/Lexer.pyi

Lines changed: 43 additions & 29 deletions

@@ -1,4 +1,3 @@
-from _typeshed import Incomplete
 from typing import TextIO
 
 from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator
@@ -10,7 +9,7 @@ from antlr4.error.Errors import (
 )
 from antlr4.InputStream import InputStream as InputStream
 from antlr4.Recognizer import Recognizer as Recognizer
-from antlr4.Token import Token as Token
+from antlr4.Token import CommonToken, Token as Token
 
 class TokenSource: ...
 
@@ -34,47 +33,62 @@ class Lexer(Recognizer, TokenSource):
     DEFAULT_MODE: int
     MORE: int
     SKIP: int
-    DEFAULT_TOKEN_CHANNEL: Incomplete
-    HIDDEN: Incomplete
+    DEFAULT_TOKEN_CHANNEL: int
+    HIDDEN: int
     MIN_CHAR_VALUE: int
     MAX_CHAR_VALUE: int
+    _input: InputStream
+    _output: TextIO
+    _factory: CommonTokenFactory
+    _tokenFactorySourcePair: tuple[TokenSource, InputStream]
+    _interp: LexerATNSimulator
+    _token: Token | None
+    _tokenStartCharIndex: int
+    _tokenStartLine: int
+    _tokenStartColumn: int
+    _hitEOF: bool
+    _channel: int
+    _type: int
+    _modeStack: list[int]
+    _mode: int
+    _text: str | None
     def __init__(self, input: InputStream, output: TextIO = ...) -> None: ...
     def reset(self) -> None: ...
-    def nextToken(self): ...
+    def nextToken(self) -> Token | None: ...
     def skip(self) -> None: ...
     def more(self) -> None: ...
-    def mode(self, m: int): ...
-    def pushMode(self, m: int): ...
-    def popMode(self): ...
+    def mode(self, m: int) -> None: ...
+    def pushMode(self, m: int) -> None: ...
+    def popMode(self) -> int: ...
     @property
-    def inputStream(self): ...
+    def inputStream(self) -> InputStream: ...
     @inputStream.setter
-    def inputStream(self, input: InputStream): ...
+    def inputStream(self, input: InputStream) -> None: ...
     @property
-    def sourceName(self): ...
-    def emitToken(self, token: Token): ...
-    def emit(self): ...
-    def emitEOF(self): ...
+    def sourceName(self) -> str: ...
+    def emitToken(self, token: Token) -> None: ...
+    def emit(self) -> CommonToken: ...
+    def emitEOF(self) -> CommonToken: ...
     @property
-    def type(self): ...
+    def type(self) -> int: ...
     @type.setter
-    def type(self, type: int): ...
+    def type(self, type: int) -> None: ...
     @property
-    def line(self): ...
+    def line(self) -> int: ...
     @line.setter
-    def line(self, line: int): ...
+    def line(self, line: int) -> None: ...
     @property
-    def column(self): ...
+    def column(self) -> int: ...
     @column.setter
-    def column(self, column: int): ...
-    def getCharIndex(self): ...
+    def column(self, column: int) -> None: ...
+    def getCharIndex(self) -> int: ...
     @property
-    def text(self): ...
+    def text(self) -> str: ...
     @text.setter
-    def text(self, txt: str): ...
-    def getAllTokens(self): ...
-    def notifyListeners(self, e: LexerNoViableAltException): ...
-    def getErrorDisplay(self, s: str): ...
-    def getErrorDisplayForChar(self, c: str): ...
-    def getCharErrorDisplay(self, c: str): ...
-    def recover(self, re: RecognitionException): ...
+    def text(self, txt: str) -> None: ...
+    def getAllTokens(self) -> list[Token]: ...
+    def notifyListeners(self, e: LexerNoViableAltException) -> None: ...
+    def getErrorDisplay(self, s: str) -> str: ...
+    def getErrorDisplayForChar(self, c: str) -> str: ...
+    def getCharErrorDisplay(self, c: str) -> str: ...
+    def recover(self, re: RecognitionException) -> None: ...
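
With nextToken, getAllTokens, and the line/column/text properties typed, iterating a lexer's output is fully checked. A sketch against the same hypothetical generated lexer ExprLexer:

    from antlr4 import InputStream
    from ExprLexer import ExprLexer  # hypothetical generated lexer

    lexer = ExprLexer(InputStream("1 + 2"))
    for token in lexer.getAllTokens():  # -> list[Token], was Incomplete
        print(token.line, token.column, token.type, token.text)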

stubs/antlr4-python3-runtime/antlr4/Parser.pyi

Lines changed: 47 additions & 34 deletions

@@ -1,8 +1,9 @@
 from _typeshed import Incomplete
-from typing import TextIO
+from typing import Literal, TextIO
 
 from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions
 from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer
+from antlr4.atn.ParserATNSimulator import ParserATNSimulator
 from antlr4.BufferedTokenStream import TokenStream as TokenStream
 from antlr4.CommonTokenFactory import TokenFactory as TokenFactory
 from antlr4.error.Errors import (
@@ -16,6 +17,7 @@ from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
 from antlr4.Recognizer import Recognizer as Recognizer
 from antlr4.RuleContext import RuleContext as RuleContext
 from antlr4.Token import Token as Token
+from antlr4.tree.ParseTreePattern import ParseTreePattern
 from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher
 from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode
 
@@ -39,48 +41,59 @@ class Parser(Recognizer):
         "_parseListeners",
         "_syntaxErrors",
     )
-    bypassAltsAtnCache: Incomplete
+    _input: TokenStream
+    _output: TextIO
+    _errHandler: DefaultErrorStrategy
+    _precedenceStack: list[int]
+    _ctx: ParserRuleContext | None
+    _tracer: TraceListener | None
+    _parseListeners: list[ParseTreeListener]
+    _syntaxErrors: int
+    _interp: ParserATNSimulator
+    bypassAltsAtnCache: dict[Incomplete, Incomplete]
     buildParseTrees: bool
     def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ...
     def reset(self) -> None: ...
-    def match(self, ttype: int): ...
-    def matchWildcard(self): ...
-    def getParseListeners(self): ...
-    def addParseListener(self, listener: ParseTreeListener): ...
-    def removeParseListener(self, listener: ParseTreeListener): ...
+    def match(self, ttype: int) -> Token: ...
+    def matchWildcard(self) -> Token: ...
+    def getParseListeners(self) -> list[ParseTreeListener]: ...
+    def addParseListener(self, listener: ParseTreeListener) -> None: ...
+    def removeParseListener(self, listener: ParseTreeListener) -> None: ...
     def removeParseListeners(self) -> None: ...
     def triggerEnterRuleEvent(self) -> None: ...
     def triggerExitRuleEvent(self) -> None: ...
-    def getNumberOfSyntaxErrors(self): ...
-    def getTokenFactory(self): ...
-    def setTokenFactory(self, factory: TokenFactory): ...
+    def getNumberOfSyntaxErrors(self) -> int: ...
+    def getTokenFactory(self) -> TokenFactory: ...
+    def setTokenFactory(self, factory: TokenFactory) -> None: ...
     def getATNWithBypassAlts(self): ...
-    def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None): ...
-    def getInputStream(self): ...
-    def setInputStream(self, input: InputStream): ...
-    def getTokenStream(self): ...
-    def setTokenStream(self, input: TokenStream): ...
-    def getCurrentToken(self): ...
-    def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None): ...
-    def consume(self): ...
+    def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None) -> ParseTreePattern: ...
+    def getInputStream(self) -> InputStream: ...
+    def setInputStream(self, input: InputStream) -> None: ...
+    def getTokenStream(self) -> TokenStream: ...
+    def setTokenStream(self, input: TokenStream) -> None: ...
+    def getCurrentToken(self) -> Token | None: ...
+    def notifyErrorListeners(
+        self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None
+    ) -> None: ...
+    def consume(self) -> None: ...
     def addContextToParseTree(self) -> None: ...
-    state: Incomplete
-    def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
+    state: int
+    def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> None: ...
     def exitRule(self) -> None: ...
-    def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ...
-    def getPrecedence(self): ...
-    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
-    def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
-    def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ...
-    def getInvokingContext(self, ruleIndex: int): ...
-    def precpred(self, localctx: RuleContext, precedence: int): ...
-    def inContext(self, context: str): ...
-    def isExpectedToken(self, symbol: int): ...
+    def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int) -> None: ...
+    def getPrecedence(self) -> int: ...
+    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> None: ...
+    def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> None: ...
+    def unrollRecursionContexts(self, parentCtx: ParserRuleContext) -> None: ...
+    def getInvokingContext(self, ruleIndex: int) -> RuleContext | None: ...
+    def precpred(self, localctx: RuleContext, precedence: int) -> bool: ...
+    def inContext(self, context: str) -> Literal[False]: ...
+    def isExpectedToken(self, symbol: int) -> bool: ...
     def getExpectedTokens(self): ...
     def getExpectedTokensWithinCurrentRule(self): ...
-    def getRuleIndex(self, ruleName: str): ...
-    def getRuleInvocationStack(self, p: RuleContext | None = None): ...
-    def getDFAStrings(self): ...
+    def getRuleIndex(self, ruleName: str) -> int: ...
+    def getRuleInvocationStack(self, p: RuleContext | None = None) -> list[str]: ...
+    def getDFAStrings(self) -> list[str]: ...
     def dumpDFA(self) -> None: ...
-    def getSourceName(self): ...
-    def setTrace(self, trace: bool): ...
+    def getSourceName(self) -> str: ...
+    def setTrace(self, trace: bool) -> None: ...
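
On the parser side, the most useful changes are match, getNumberOfSyntaxErrors, and getCurrentToken: the latter is now Token | None, so callers must narrow before use. A sketch with hypothetical generated classes ExprLexer/ExprParser and a hypothetical start rule expr:

    from antlr4 import CommonTokenStream, InputStream
    from ExprLexer import ExprLexer    # hypothetical generated lexer
    from ExprParser import ExprParser  # hypothetical generated parser

    parser = ExprParser(CommonTokenStream(ExprLexer(InputStream("1 + 2"))))
    tree = parser.expr()                     # hypothetical start rule
    print(parser.getNumberOfSyntaxErrors())  # -> int
    tok = parser.getCurrentToken()           # -> Token | None
    if tok is not None:                      # narrowing now enforced
        print(tok.text)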

stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi

Lines changed: 13 additions & 13 deletions

@@ -28,19 +28,19 @@ class ParserInterpreter(Parser):
         "_parentContextStack",
         "pushRecursionContextStates",
     )
-    grammarFileName: Incomplete
-    atn: Incomplete
-    tokenNames: Incomplete
-    ruleNames: Incomplete
-    decisionToDFA: Incomplete
-    sharedContextCache: Incomplete
-    pushRecursionContextStates: Incomplete
+    grammarFileName: str
+    atn: ATN
+    tokenNames: list[Incomplete]
+    ruleNames: list[str]
+    decisionToDFA: list[DFA]
+    sharedContextCache: PredictionContextCache
+    pushRecursionContextStates: set[int]
     def __init__(
         self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream
     ) -> None: ...
-    state: Incomplete
-    def parse(self, startRuleIndex: int): ...
-    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
-    def getATNState(self): ...
-    def visitState(self, p: ATNState): ...
-    def visitRuleStopState(self, p: ATNState): ...
+    state: int
+    def parse(self, startRuleIndex: int) -> ParserRuleContext | None: ...
+    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> None: ...
+    def getATNState(self) -> ATNState: ...
+    def visitState(self, p: ATNState) -> None: ...
+    def visitRuleStopState(self, p: ATNState) -> None: ...
