diff --git a/tests/test_parse.py b/tests/test_parse.py
index 8383a42..ac11651 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -49,6 +49,17 @@
     return tokens
 
 
+# Wrapper to report ParseError nicely to Hypothesis
+def safe_tokenize(code, filename):
+    tokens = []
+    try:
+        tokenizer = parse.Tokenizer(code, filename)
+        tokens = tokenizer.tokenize()
+    except parse.ParseError as e:
+        raise AssertionError("ParseError thrown: %s" % (e))
+    return tokens
+
+
 class SampleToken:
     def __init__(self, code, type, value):
         self.code = code
@@ -119,12 +130,7 @@
 # Test that we can lex tokens correctly
 @given(draw_token())
 def test_lexer_token(token):
-    tokens = []
-    try:
-        tokenizer = parse.Tokenizer(token.code, "")
-        tokens = tokenizer.tokenize()
-    except parse.ParseError as e:
-        raise AssertionError("ParseError thrown: %s" % (e))
+    tokens = safe_tokenize(token.code, "")
     assert tokens[0].type == token.type
     assert tokens[0].value == token.value
     assert tokens[0].location.line == 1
@@ -139,8 +145,7 @@
     text_tokens = split_by(text, lexer_whitespace)
     assume("BeginNote" not in text_tokens and "EndNote" not in text_tokens)
     code = "BeginNote" + space1 + text + space2 + "EndNote"
-    tokenizer = parse.Tokenizer(code, "")
-    tokens = tokenizer.tokenize()
+    tokens = safe_tokenize(code, "")
     assert tokens[0].type == "EOF"
 
 
@@ -148,8 +153,7 @@
 @given(lists(sampled_from(reserved_words), min_size=2))
 def test_lexer_conjoined_words(words):
     word = "".join(words)
-    tokenizer = parse.Tokenizer(word, "")
-    tokens = tokenizer.tokenize()
+    tokens = safe_tokenize(word, "")
     assert tokens[0].type == "symbol"
     assert tokens[0].value == word
     assert tokens[0].location.line == 1
@@ -162,8 +166,7 @@
 @given(text(alphabet=characters(blacklist_characters="\n")))
 def test_lexer_shebang(shebang):
     code = "#!" + shebang + "\n"
-    tokenizer = parse.Tokenizer(code, "")
-    tokens = tokenizer.tokenize()
+    tokens = safe_tokenize(code, "")
     assert tokens[0].type == "EOF"
     assert tokens[0].location.line == 2
     assert tokens[0].location.column == 0
@@ -172,8 +175,7 @@
 
 # Test with no data at all
 def test_parser_empty():
-    tokenizer = parse.Tokenizer("", "")
-    tokens = tokenizer.tokenize()
+    tokens = safe_tokenize("", "")
    assert tokens[0].type == "EOF"
     assert tokens[0].location.line == 1
     assert tokens[0].location.column == 0
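
Note on the helper: per its comment, safe_tokenize exists so that a ParseError
surfaces to Hypothesis as an ordinary AssertionError, carrying the error text,
rather than as a raw tokenizer traceback. A minimal sketch of the intended
behaviour, using only cases already exercised by the tests above (the `parse`
module and the token `location` API are the ones under test, not new names):

    # Empty input: tokenizes cleanly to a lone EOF token at line 1, column 0.
    tokens = safe_tokenize("", "")
    assert tokens[0].type == "EOF"

    # A shebang line is consumed (any non-newline text after "#!"), so the
    # EOF token lands on line 2, column 0. The shebang text is illustrative.
    tokens = safe_tokenize("#!anything here\n", "")
    assert tokens[0].type == "EOF"
    assert tokens[0].location.line == 2

    # If tokenize() raises parse.ParseError instead, safe_tokenize re-raises
    # it as AssertionError("ParseError thrown: ..."), which Hypothesis reports
    # as a failing example alongside the generated input.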