Skip to content

Commit 046fee5

Browse files
committed
Move TokenKind into a separate file to resolve a circular import
Replicates graphql/graphql-js@3a71d3e
1 parent 608e972 commit 046fee5

File tree

5 files changed

+114
-105
lines changed

5 files changed

+114
-105
lines changed

graphql/language/__init__.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,9 @@
88

99
from .location import get_location, SourceLocation
1010

11-
from .lexer import Lexer, TokenKind, Token
11+
from .token_kind import TokenKind
12+
13+
from .lexer import Lexer
1214

1315
from .parser import parse, parse_type, parse_value
1416

@@ -27,6 +29,7 @@
2729

2830
from .ast import (
2931
Location,
32+
Token,
3033
Node,
3134
# Each kind of AST node
3235
NameNode,
@@ -99,9 +102,8 @@
99102
__all__ = [
100103
"get_location",
101104
"SourceLocation",
102-
"Lexer",
103105
"TokenKind",
104-
"Token",
106+
"Lexer",
105107
"parse",
106108
"parse_value",
107109
"parse_type",
@@ -116,6 +118,7 @@
116118
"REMOVE",
117119
"IDLE",
118120
"Location",
121+
"Token",
119122
"DirectiveLocation",
120123
"Node",
121124
"NameNode",

graphql/language/ast.py

Lines changed: 70 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
1-
from copy import deepcopy
1+
from copy import copy, deepcopy
22
from enum import Enum
33
from typing import List, NamedTuple, Optional, Union
44

5-
from .lexer import Token
65
from .source import Source
6+
from .token_kind import TokenKind
77
from ..pyutils import camel_to_snake
88

99
__all__ = [
1010
"Location",
11+
"Token",
1112
"Node",
1213
"NameNode",
1314
"DocumentNode",
@@ -65,6 +66,73 @@
6566
]
6667

6768

69+
class Token:
70+
__slots__ = ("kind", "start", "end", "line", "column", "prev", "next", "value")
71+
72+
def __init__(
73+
self,
74+
kind: TokenKind,
75+
start: int,
76+
end: int,
77+
line: int,
78+
column: int,
79+
prev: "Token" = None,
80+
value: str = None,
81+
) -> None:
82+
self.kind = kind
83+
self.start, self.end = start, end
84+
self.line, self.column = line, column
85+
self.prev: Optional[Token] = prev
86+
self.next: Optional[Token] = None
87+
self.value: Optional[str] = value
88+
89+
def __str__(self):
90+
return self.desc
91+
92+
def __repr__(self):
93+
"""Print a simplified form when appearing in repr() or inspect()."""
94+
return f"<Token {self.desc} {self.line}/{self.column}>"
95+
96+
def __inspect__(self):
97+
return repr(self)
98+
99+
def __eq__(self, other):
100+
if isinstance(other, Token):
101+
return (
102+
self.kind == other.kind
103+
and self.start == other.start
104+
and self.end == other.end
105+
and self.line == other.line
106+
and self.column == other.column
107+
and self.value == other.value
108+
)
109+
elif isinstance(other, str):
110+
return other == self.desc
111+
return False
112+
113+
def __copy__(self):
114+
"""Create a shallow copy of the token"""
115+
return self.__class__(
116+
self.kind,
117+
self.start,
118+
self.end,
119+
self.line,
120+
self.column,
121+
self.prev,
122+
self.value,
123+
)
124+
125+
def __deepcopy__(self, memo):
126+
"""Allow only shallow copies to avoid recursion."""
127+
return copy(self)
128+
129+
@property
130+
def desc(self) -> str:
131+
"""A helper property to describe a token as a string for debugging"""
132+
kind, value = self.kind.value, self.value
133+
return f"{kind} {value!r}" if value else kind
134+
135+
68136
class Location(NamedTuple):
69137
"""AST Location
70138

graphql/language/lexer.py

Lines changed: 5 additions & 99 deletions
Original file line numberDiff line numberDiff line change
@@ -1,106 +1,12 @@
1-
from copy import copy
2-
from enum import Enum
3-
from typing import List, Optional
1+
from typing import List
42

53
from ..error import GraphQLSyntaxError
6-
from .source import Source
4+
from .ast import Token
75
from .block_string import dedent_block_string_value
6+
from .source import Source
7+
from .token_kind import TokenKind
88

9-
__all__ = ["Lexer", "TokenKind", "Token", "is_punctuator_token"]
10-
11-
12-
class TokenKind(Enum):
13-
"""Each kind of token"""
14-
15-
SOF = "<SOF>"
16-
EOF = "<EOF>"
17-
BANG = "!"
18-
DOLLAR = "$"
19-
AMP = "&"
20-
PAREN_L = "("
21-
PAREN_R = ")"
22-
SPREAD = "..."
23-
COLON = ":"
24-
EQUALS = "="
25-
AT = "@"
26-
BRACKET_L = "["
27-
BRACKET_R = "]"
28-
BRACE_L = "{"
29-
PIPE = "|"
30-
BRACE_R = "}"
31-
NAME = "Name"
32-
INT = "Int"
33-
FLOAT = "Float"
34-
STRING = "String"
35-
BLOCK_STRING = "BlockString"
36-
COMMENT = "Comment"
37-
38-
39-
class Token:
40-
__slots__ = ("kind", "start", "end", "line", "column", "prev", "next", "value")
41-
42-
def __init__(
43-
self,
44-
kind: TokenKind,
45-
start: int,
46-
end: int,
47-
line: int,
48-
column: int,
49-
prev: "Token" = None,
50-
value: str = None,
51-
) -> None:
52-
self.kind = kind
53-
self.start, self.end = start, end
54-
self.line, self.column = line, column
55-
self.prev: Optional[Token] = prev
56-
self.next: Optional[Token] = None
57-
self.value: Optional[str] = value
58-
59-
def __str__(self):
60-
return self.desc
61-
62-
def __repr__(self):
63-
"""Print a simplified form when appearing in repr() or inspect()."""
64-
return f"<Token {self.desc} {self.line}/{self.column}>"
65-
66-
def __inspect__(self):
67-
return repr(self)
68-
69-
def __eq__(self, other):
70-
if isinstance(other, Token):
71-
return (
72-
self.kind == other.kind
73-
and self.start == other.start
74-
and self.end == other.end
75-
and self.line == other.line
76-
and self.column == other.column
77-
and self.value == other.value
78-
)
79-
elif isinstance(other, str):
80-
return other == self.desc
81-
return False
82-
83-
def __copy__(self):
84-
"""Create a shallow copy of the token"""
85-
return self.__class__(
86-
self.kind,
87-
self.start,
88-
self.end,
89-
self.line,
90-
self.column,
91-
self.prev,
92-
self.value,
93-
)
94-
95-
def __deepcopy__(self, memo):
96-
"""Allow only shallow copies to avoid recursion."""
97-
return copy(self)
98-
99-
@property
100-
def desc(self) -> str:
101-
"""A helper property to describe a token as a string for debugging"""
102-
kind, value = self.kind.value, self.value
103-
return f"{kind} {value!r}" if value else kind
9+
__all__ = ["Lexer", "is_punctuator_token"]
10410

10511

10612
_punctuator_tokens = frozenset(

graphql/language/parser.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,8 +57,10 @@
5757
VariableNode,
5858
)
5959
from .directive_locations import DirectiveLocation
60-
from .lexer import Lexer, Token, TokenKind
60+
from .ast import Token
61+
from .lexer import Lexer
6162
from .source import Source
63+
from .token_kind import TokenKind
6264
from ..error import GraphQLError, GraphQLSyntaxError
6365
from ..pyutils import inspect
6466

graphql/language/token_kind.py

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
from enum import Enum
2+
3+
__all__ = ["TokenKind"]
4+
5+
6+
class TokenKind(Enum):
7+
"""The different kinds of tokens that the lexer emits"""
8+
9+
SOF = "<SOF>"
10+
EOF = "<EOF>"
11+
BANG = "!"
12+
DOLLAR = "$"
13+
AMP = "&"
14+
PAREN_L = "("
15+
PAREN_R = ")"
16+
SPREAD = "..."
17+
COLON = ":"
18+
EQUALS = "="
19+
AT = "@"
20+
BRACKET_L = "["
21+
BRACKET_R = "]"
22+
BRACE_L = "{"
23+
PIPE = "|"
24+
BRACE_R = "}"
25+
NAME = "Name"
26+
INT = "Int"
27+
FLOAT = "Float"
28+
STRING = "String"
29+
BLOCK_STRING = "BlockString"
30+
COMMENT = "Comment"

0 commit comments

Comments
 (0)