Commit 2de9352

Use TokenKind enum inside lexer/parser tests
Replicates graphql/graphql-js@aba1066
1 parent: ec998aa

2 files changed (+21, -12 lines)
tests/language/test_lexer.py (+18, -10)

```diff
@@ -322,8 +322,8 @@ def lex_reports_useful_unknown_character_error():
 
     # noinspection PyArgumentEqualDefault
     def lex_reports_useful_information_for_dashes_in_names():
-        q = "a-b"
-        lexer = Lexer(Source(q))
+        source = Source("a-b")
+        lexer = Lexer(source)
         first_token = lexer.advance()
         assert first_token == Token(TokenKind.NAME, 0, 1, 1, 1, None, "a")
         with raises(GraphQLSyntaxError) as exc_info:
@@ -335,7 +335,15 @@ def lex_reports_useful_information_for_dashes_in_names():
         assert error.locations == [(1, 3)]
 
     def produces_double_linked_list_of_tokens_including_comments():
-        lexer = Lexer(Source("{\n #comment\n field\n }"))
+        source = Source(
+            """
+            {
+                #comment
+                field
+            }
+            """
+        )
+        lexer = Lexer(source)
         start_token = lexer.token
         while True:
             end_token = lexer.advance()
@@ -350,11 +358,11 @@ def produces_double_linked_list_of_tokens_including_comments():
             assert not tokens or tok.prev == tokens[-1]
             tokens.append(tok)
             tok = tok.next
-        assert [tok.kind.value for tok in tokens] == [
-            "<SOF>",
-            "{",
-            "Comment",
-            "Name",
-            "}",
-            "<EOF>",
+        assert [tok.kind for tok in tokens] == [
+            TokenKind.SOF,
+            TokenKind.BRACE_L,
+            TokenKind.COMMENT,
+            TokenKind.NAME,
+            TokenKind.BRACE_R,
+            TokenKind.EOF,
+        ]
```
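The lexer change replaces comparisons against the kinds' string values with the `TokenKind` enum members themselves, which reads better and lets typos fail at attribute lookup instead of silently comparing unequal strings. A minimal sketch of the same pattern against graphql-core's public lexer API (the query string here is just an illustration, not from the commit):

```python
from graphql.language import Lexer, Source, TokenKind

# Tokenize a small document and collect every token kind,
# comparing against TokenKind enum members rather than strings.
lexer = Lexer(Source("{ field }"))
kinds = [lexer.token.kind]  # the lexer starts on the <SOF> token
while kinds[-1] is not TokenKind.EOF:
    kinds.append(lexer.advance().kind)

assert kinds == [
    TokenKind.SOF,
    TokenKind.BRACE_L,
    TokenKind.NAME,
    TokenKind.BRACE_R,
    TokenKind.EOF,
]

# The underlying string is still reachable via .value when needed.
assert TokenKind.SOF.value == "<SOF>"
```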

tests/language/test_parser.py (+3, -2)

```diff
@@ -22,6 +22,7 @@
     StringValueNode,
     ValueNode,
     Token,
+    TokenKind,
     parse,
     parse_type,
     parse_value,
@@ -376,10 +377,10 @@ def contains_references_to_start_and_end_tokens():
         result = parse("{ id }")
         start_token = result.loc.start_token
         assert isinstance(start_token, Token)
-        assert start_token.desc == "<SOF>"
+        assert start_token.kind == TokenKind.SOF
         end_token = result.loc.end_token
         assert isinstance(end_token, Token)
-        assert end_token.desc == "<EOF>"
+        assert end_token.kind == TokenKind.EOF
 
 
 def describe_parse_value():
```
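On the parser side, the test now checks the boundary tokens' `kind` against the enum rather than their string description. A minimal sketch of the same assertion on a freshly parsed document (assuming a default `parse` call, which records locations):

```python
from graphql import parse
from graphql.language import TokenKind

document = parse("{ id }")

# A parsed document's location spans from the <SOF> to the <EOF> token;
# both boundaries can be checked directly against the TokenKind enum.
assert document.loc.start_token.kind is TokenKind.SOF
assert document.loc.end_token.kind is TokenKind.EOF
```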
