
Commit a003731

Convert Location to be a normal class
Also use a constructor that is compatible with the original. Replicates graphql/graphql-js@ec5fbb0
1 parent 8c70882 commit a003731
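
As a rough illustration of what the change means for callers (a minimal sketch assuming graphql-core with this commit applied; exact offsets depend on the parsed source):

from graphql.language import parse

# Every AST node produced by the parser carries a Location built from tokens.
doc = parse("{ field }")
loc = doc.loc

# start/end are now derived from the boundary tokens rather than passed in separately.
assert loc.start == loc.start_token.start
assert loc.end == loc.end_token.end

# The string form and comparison against a (start, end) pair behave as before.
print(loc)                          # e.g. "0:9" for the document above
assert loc == (loc.start, loc.end)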

3 files changed (+38 / -26 lines)

src/graphql/language/ast.py

Lines changed: 11 additions & 2 deletions
@@ -1,6 +1,6 @@
 from copy import copy, deepcopy
 from enum import Enum
-from typing import List, NamedTuple, Optional, Union
+from typing import List, Optional, Union

 from .source import Source
 from .token_kind import TokenKind
@@ -138,19 +138,28 @@ def desc(self) -> str:
         return f"{kind} {value!r}" if value else kind


-class Location(NamedTuple):
+class Location:
     """AST Location

     Contains a range of UTF-8 character offsets and token references that identify the
     region of the source from which the AST derived.
     """

+    __slots__ = ("start", "end", "start_token", "end_token", "source")
+
     start: int  # character offset at which this Node begins
     end: int  # character offset at which this Node ends
     start_token: Token  # Token at which this Node begins
     end_token: Token  # Token at which this Node ends.
     source: Source  # Source document the AST represents

+    def __init__(self, start_token: Token, end_token: Token, source: Source):
+        self.start = start_token.start
+        self.end = end_token.end
+        self.start_token = start_token
+        self.end_token = end_token
+        self.source = source
+
     def __str__(self):
         return f"{self.start}:{self.end}"
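
Concretely, the new constructor takes the two boundary tokens and the source, and derives the character offsets itself. A rough sketch of building a Location by hand from lexer tokens (assuming graphql-core with this commit; token positions depend on the input):

from graphql.language import Lexer, Source
from graphql.language.ast import Location

source = Source("{ field }")
lexer = Lexer(source)

first = lexer.advance()   # the "{" punctuator token
last = lexer.advance()    # the "field" name token

# Old call shape: Location(first.start, last.end, first, last, source)
# New call shape: the offsets are computed from the tokens.
loc = Location(first, last, source)
assert (loc.start, loc.end) == (first.start, last.end)
assert loc.start_token is first and loc.end_token is last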

src/graphql/language/parser.py

Lines changed: 1 addition & 3 deletions
@@ -959,9 +959,7 @@ def loc(self, start_token: Token) -> Optional[Location]:
         if not self._no_location:
             end_token = self._lexer.last_token
             source = self._lexer.source
-            return Location(
-                start_token.start, end_token.end, start_token, end_token, source
-            )
+            return Location(start_token, end_token, source)
         return None

     def peek(self, kind: TokenKind) -> bool:

tests/language/test_ast.py

Lines changed: 26 additions & 21 deletions
@@ -80,48 +80,53 @@ def describe_location_class():
     source = Source("source")

     def initializes():
-        loc = Location(1, 2, token1, token2, source)
-        assert loc.start == 1
-        assert loc.end == 2
+        loc = Location(token1, token2, source)
+        assert loc.start == token1.start
+        assert loc.end == token2.end
         assert loc.start_token is token1
         assert loc.end_token is token2
         assert loc.source is source

     def can_stringify_with_start_and_end():
-        loc = Location(1, 2, token1, token2, source)
-        assert str(loc) == "1:2"
+        loc = Location(token1, token2, source)
+        assert str(loc) == "1:3"

     def has_representation_with_start_and_end():
-        loc = Location(1, 2, token1, token2, source)
-        assert repr(loc) == "<Location 1:2>"
+        loc = Location(token1, token2, source)
+        assert repr(loc) == "<Location 1:3>"
         assert inspect(loc) == repr(loc)

     def can_check_equality():
-        loc1 = Location(1, 2, token1, token2, source)
-        loc2 = Location(1, 2, token1, token2, source)
+        loc1 = Location(token1, token2, source)
+        loc2 = Location(token1, token2, source)
         assert loc2 == loc1
-        loc3 = Location(3, 2, token1, token2, source)
+        loc3 = Location(token1, token1, source)
         assert loc3 != loc1
-        loc4 = Location(1, 4, token1, token2, source)
+        loc4 = Location(token2, token2, source)
         assert loc4 != loc1
+        assert loc4 != loc3

     def can_check_equality_with_tuple_or_list():
-        loc = Location(1, 2, token1, token2, source)
-        assert loc == (1, 2)
-        assert loc == [1, 2]
-        assert not loc != (1, 2)
-        assert not loc != [1, 2]
-        assert loc != (3, 2)
-        assert loc != [1, 4]
+        loc = Location(token1, token2, source)
+        assert loc == (1, 3)
+        assert loc == [1, 3]
+        assert not loc != (1, 3)
+        assert not loc != [1, 3]
+        assert loc != (1, 2)
+        assert loc != [2, 3]

     def can_hash():
-        loc1 = Location(1, 2, token1, token2, source)
-        loc2 = Location(1, 2, token1, token2, source)
+        loc1 = Location(token1, token2, source)
+        loc2 = Location(token1, token2, source)
         assert loc2 == loc1
         assert hash(loc2) == hash(loc1)
-        loc3 = Location(1, 3, token1, token2, source)
+        loc3 = Location(token1, token1, source)
         assert loc3 != loc1
         assert hash(loc3) != hash(loc1)
+        loc4 = Location(token2, token2, source)
+        assert loc4 != loc1
+        assert hash(loc4) != hash(loc1)
+        assert hash(loc4) != hash(loc3)


 def describe_node_class():
