Skip to content

Commit 0e33fa1

Browse files
committed
More rigid tests of AST classes
1 parent 22d4ea8 commit 0e33fa1

File tree

4 files changed

+154 −5 lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ The current version 3.0.0a1 of GraphQL-core is up-to-date
1616
with GraphQL.js version 14.4.0.
1717

1818
All parts of the API are covered by an extensive test suite
19-
of currently 1891 unit tests.
19+
of currently 1906 unit tests.
2020

2121

2222
## Documentation

src/graphql/language/ast.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def __str__(self):
9191

9292
def __repr__(self):
9393
"""Print a simplified form when appearing in repr() or inspect()."""
94-
return f"<Token {self.desc} {self.line}/{self.column}>"
94+
return f"<Token {self.desc} {self.line}:{self.column}>"
9595

9696
def __inspect__(self):
9797
return repr(self)
@@ -168,6 +168,9 @@ def __eq__(self, other):
168168
return self.start == other[0] and self.end == other[1]
169169
return False
170170

171+
def __ne__(self, other):
172+
return not self.__eq__(other)
173+
171174
def __hash__(self):
172175
return hash((self.start, self.end))
173176

tests/language/test_ast.py

Lines changed: 148 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,126 @@
1-
from copy import copy
1+
from copy import copy, deepcopy
22

3-
from graphql.language import Node
3+
from graphql.language import Location, Node, Source, Token, TokenKind
4+
from graphql.pyutils import inspect
45

56

67
class SampleTestNode(Node):
78
__slots__ = "alpha", "beta"
89

910

11+
def describe_token_class():
12+
def initializes():
13+
prev = Token(TokenKind.EQUALS, 10, 11, 1, 2)
14+
token = Token(
15+
kind=TokenKind.NAME,
16+
start=11,
17+
end=12,
18+
line=1,
19+
column=3,
20+
prev=prev,
21+
value="n",
22+
)
23+
assert prev.kind == TokenKind.EQUALS
24+
assert prev.start == 10
25+
assert prev.end == 11
26+
assert prev.line == 1
27+
assert prev.column == 2
28+
assert token.kind == TokenKind.NAME
29+
assert token.start == 11
30+
assert token.end == 12
31+
assert token.line == 1
32+
assert token.column == 3
33+
assert token.prev is prev
34+
assert token.value == "n"
35+
36+
def can_stringify():
37+
token = Token(TokenKind.NAME, 1, 2, 1, 2, value="test")
38+
assert str(token) == "Name 'test'"
39+
assert token.desc == str(token)
40+
41+
def has_representation_with_line_and_column():
42+
token = Token(TokenKind.NAME, 1, 2, 1, 2, value="test")
43+
assert repr(token) == "<Token Name 'test' 1:2>"
44+
assert inspect(token) == repr(token)
45+
46+
def can_check_equality():
47+
token1 = Token(TokenKind.NAME, 1, 2, 1, 2, value="test")
48+
token2 = Token(TokenKind.NAME, 1, 2, 1, 2, value="test")
49+
assert token2 == token1
50+
assert not token2 != token1
51+
token3 = Token(TokenKind.NAME, 1, 2, 1, 2, value="text")
52+
assert token3 != token1
53+
token4 = Token(TokenKind.NAME, 1, 4, 1, 2, value="test")
54+
assert token4 != token1
55+
token5 = Token(TokenKind.NAME, 1, 2, 1, 4, value="test")
56+
assert token5 != token1
57+
58+
def can_hash():
59+
token1 = Token(TokenKind.NAME, 1, 2, 1, 2, value="hash")
60+
token2 = Token(TokenKind.NAME, 1, 2, 1, 2, value="hash")
61+
assert token2 == token1
62+
assert hash(token2) == hash(token1)
63+
token3 = Token(TokenKind.NAME, 1, 2, 1, 2, value="bash")
64+
assert token3 != token1
65+
assert hash(token3) != hash(token1)
66+
67+
def can_copy():
68+
token1 = Token(TokenKind.NAME, 1, 2, 1, 2, value="copy")
69+
token2 = copy(token1)
70+
assert token2 == token1
71+
assert token2 is not token1
72+
73+
74+
def describe_location_class():
75+
token1 = Token(TokenKind.NAME, 1, 2, 1, 2)
76+
token2 = Token(TokenKind.NAME, 2, 3, 1, 3)
77+
source = Source("source")
78+
79+
def initializes():
80+
loc = Location(1, 2, token1, token2, source)
81+
assert loc.start == 1
82+
assert loc.end == 2
83+
assert loc.start_token is token1
84+
assert loc.end_token is token2
85+
assert loc.source is source
86+
87+
def can_stringify_with_start_and_end():
88+
loc = Location(1, 2, token1, token2, source)
89+
assert str(loc) == "1:2"
90+
91+
def has_representation_with_start_and_end():
92+
loc = Location(1, 2, token1, token2, source)
93+
assert repr(loc) == "<Location 1:2>"
94+
assert inspect(loc) == repr(loc)
95+
96+
def can_check_equality():
97+
loc1 = Location(1, 2, token1, token2, source)
98+
loc2 = Location(1, 2, token1, token2, source)
99+
assert loc2 == loc1
100+
loc3 = Location(3, 2, token1, token2, source)
101+
assert loc3 != loc1
102+
loc4 = Location(1, 4, token1, token2, source)
103+
assert loc4 != loc1
104+
105+
def can_check_equality_with_tuple_or_list():
106+
loc = Location(1, 2, token1, token2, source)
107+
assert loc == (1, 2)
108+
assert loc == [1, 2]
109+
assert not loc != (1, 2)
110+
assert not loc != [1, 2]
111+
assert loc != (3, 2)
112+
assert loc != [1, 4]
113+
114+
def can_hash():
115+
loc1 = Location(1, 2, token1, token2, source)
116+
loc2 = Location(1, 2, token1, token2, source)
117+
assert loc2 == loc1
118+
assert hash(loc2) == hash(loc1)
119+
loc3 = Location(1, 3, token1, token2, source)
120+
assert loc3 != loc1
121+
assert hash(loc3) != hash(loc1)
122+
123+
10124
def describe_node_class():
11125
def initializes_with_keywords():
12126
node = SampleTestNode(alpha=1, beta=2, loc=0)
@@ -32,17 +146,49 @@ def can_check_equality():
32146
node = SampleTestNode(alpha=1, beta=2)
33147
node2 = SampleTestNode(alpha=1, beta=2)
34148
assert node2 == node
149+
assert not node2 != node
35150
node2 = SampleTestNode(alpha=1, beta=1)
36151
assert node2 != node
37152
node2 = Node(alpha=1, beta=2)
38153
assert node2 != node
39154

155+
def can_hash():
156+
node = SampleTestNode(alpha=1, beta=2)
157+
node2 = SampleTestNode(alpha=1, beta=2)
158+
assert node == node2
159+
assert node2 is not node
160+
assert hash(node2) == hash(node)
161+
node3 = SampleTestNode(alpha=1, beta=3)
162+
assert node3 != node
163+
assert hash(node3) != hash(node)
164+
40165
def can_create_shallow_copy():
41166
node = SampleTestNode(alpha=1, beta=2)
42167
node2 = copy(node)
43168
assert node2 is not node
44169
assert node2 == node
45170

171+
def shallow_copy_is_really_shallow():
172+
node = SampleTestNode(alpha=1, beta=2)
173+
node2 = SampleTestNode(alpha=node, beta=node)
174+
node3 = copy(node2)
175+
assert node3 is not node2
176+
assert node3 == node2
177+
assert node3.alpha is node2.alpha
178+
assert node3.beta is node2.beta
179+
180+
def can_create_deep_copy():
181+
alpha = SampleTestNode(alpha=1, beta=2)
182+
beta = SampleTestNode(alpha=3, beta=4)
183+
node = SampleTestNode(alpha=alpha, beta=beta)
184+
node2 = deepcopy(node)
185+
assert node2 is not node
186+
assert node2 == node
187+
assert node2.alpha == alpha
188+
assert node2.alpha is not alpha
189+
assert node2.alpha == alpha
190+
assert node2.beta is not beta
191+
46192
def provides_snake_cased_kind_as_class_attribute():
47193
assert SampleTestNode.kind == "sample_test"
48194

tests/language/test_lexer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def can_be_stringified_or_pyutils_inspected():
4545
token = lex_one("foo")
4646
assert token.desc == "Name 'foo'"
4747
assert str(token) == token.desc
48-
assert repr(token) == "<Token Name 'foo' 1/1>"
48+
assert repr(token) == "<Token Name 'foo' 1:1>"
4949
assert inspect(token) == repr(token)
5050

5151
# noinspection PyArgumentEqualDefault

0 commit comments

Comments (0)