diff --git a/src/parse.py b/src/parse.py
index bdbef78..feef213 100644
--- a/src/parse.py
+++ b/src/parse.py
@@ -32,6 +32,21 @@
 )
 
 
+# Represents a stream of consumable tokens
+class TokenStream:
+    def __init__(self, tokens):
+        self.tokens = tokens.copy()
+
+    def __repr__(self):
+        return "TokenStream(tokens %s)" % (self.tokens)  # pragma: no mutate
+
+    def pop(self):
+        if self.tokens:
+            return self.tokens.pop(0)
+        else:
+            return None
+
+
 # Converts tokens to syntax
 def import_tokens(tokens):
     output = []
diff --git a/tests/test_parse.py b/tests/test_parse.py
index 5a1029d..3bebe34 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -67,6 +67,23 @@
     return (input, tokens)
 
 
+# Tests a token stream reads tokens correctly
+# We expect the following behaviour:
+# - The input token list is copied
+# - All tokens are popped in order
+# - None tokens are returned at the end of the stream
+@given(lists(test_tokenize.draw_token_random()))
+def test_parse_token_stream(tokens):
+    stream = parse.TokenStream(tokens)
+    read = []
+    token = stream.pop()
+    while token is not None:
+        read.append(token)
+        token = stream.pop()
+    assert read == tokens
+    assert stream.pop() is None
+
+
 # Tests importing tokens works correctly
 # We expect the following behaviour:
 # - Each token is converted to a Syntax
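
For reference, a minimal sketch of how the new TokenStream is meant to be consumed, mirroring the property test above. This is illustrative only and not part of the patch; the import path and the example token list are assumptions.

    from src.parse import TokenStream   # assumed import path for src/parse.py

    stream = TokenStream(["let", "x", "=", "1"])
    token = stream.pop()
    while token is not None:
        print(token)                 # tokens come back in their original order
        token = stream.pop()
    assert stream.pop() is None      # an exhausted stream keeps returning None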