diff --git a/src/parse.py b/src/parse.py
index 283d4dc..64d21b6 100644
--- a/src/parse.py
+++ b/src/parse.py
@@ -5,7 +5,7 @@
 # Removes whitespace tokens
-def remove_whitespace(tokens):
+def strip_whitespace(tokens):
     output = []
     for t in tokens:
         if t.type not in [tokenize.TokenType.SPACE, tokenize.TokenType.NEWLINE]:
@@ -15,5 +15,5 @@
 # Parses tokens
 def parse(tokens):
-    stripped = remove_whitespace(tokens)
+    stripped = strip_whitespace(tokens)
     return stripped
diff --git a/tests/test_parse.py b/tests/test_parse.py
index 5982c36..83046d7 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -23,21 +23,21 @@
     return (input, tokens)

-# Tests is remove_whitespace works correctly
+# Tests strip_whitespace works correctly
 # We expect the following behaviour:
 # - No tokens are modified
 # - Tokens of type SPACE or NEWLINE are removed from the output
 @given(draw_tokens_whitespace())
-def test_parse_remove_whitespace(test_data):
+def test_parse_strip_whitespace(test_data):
     (input, tokens) = test_data
-    assert parse.remove_whitespace(input) == tokens
+    assert parse.strip_whitespace(input) == tokens

 # Tests the parser wrapper works correctly
 # We expect the following behaviour:
-# - Whitespace tokens are removed
+# - Whitespace tokens are stripped
 @given(lists(test_tokenize.draw_token_random()))
 def test_parse_fuzz(tokens):
-    stripped = parse.remove_whitespace(tokens)
+    stripped = parse.strip_whitespace(tokens)
     parsed = parse.parse(tokens)
     assert stripped == parsed
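
For reference, here is a minimal self-contained sketch of the renamed helper together with a Hypothesis-style property test mirroring the expectations stated above. The Token dataclass, TokenType enum, and the inline strategy are hypothetical stand-ins for the project's tokenize module and its token-drawing strategies; this is an illustration, not the repository's actual code.

# Minimal sketch only: Token and TokenType below are hypothetical
# stand-ins for the project's tokenize module.
import enum
from dataclasses import dataclass

from hypothesis import given, strategies as st


class TokenType(enum.Enum):
    SPACE = enum.auto()
    NEWLINE = enum.auto()
    WORD = enum.auto()


@dataclass(frozen=True)
class Token:
    type: TokenType
    text: str = ""


def strip_whitespace(tokens):
    # Keep every token that is not SPACE or NEWLINE, preserving order.
    output = []
    for t in tokens:
        if t.type not in (TokenType.SPACE, TokenType.NEWLINE):
            output.append(t)
    return output


# Property test mirroring the expectations in the diff: no SPACE/NEWLINE
# token survives, and the remaining tokens are unmodified and in order.
@given(st.lists(st.builds(Token, st.sampled_from(TokenType), st.text(max_size=3))))
def test_strip_whitespace(tokens):
    stripped = strip_whitespace(tokens)
    assert not any(t.type in (TokenType.SPACE, TokenType.NEWLINE) for t in stripped)
    assert stripped == [t for t in tokens
                        if t.type not in (TokenType.SPACE, TokenType.NEWLINE)]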