diff --git a/tests/test_parse.py b/tests/test_parse.py
index 1a84f39..3fd037e 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -59,6 +59,13 @@
     return token
 
 
+@composite
+def draw_token_by_value(draw, value):
+    location = draw(test_tokenize.draw_token_location())
+    token_type = tokenize.SyntaxType.TOKEN
+    return tokenize.Syntax(value, location, token_type)
+
+
 # Draws tokens to make a valid text string and its value
 @composite
 def draw_syntax_text_valid(draw):
@@ -66,19 +73,11 @@
     value = ""
     for token in tokens:
         value += token.value
-    s_value = draw(test_tokenize.draw_token_keyword())
-    s_value = "StartText"
-    s_location = draw(test_tokenize.draw_token_location())
-    s_type = tokenize.SyntaxType.TOKEN
-    start = tokenize.Syntax(s_value, s_location, s_type)
-    e_value = draw(test_tokenize.draw_token_keyword())
-    e_value = "EndText"
-    e_location = draw(test_tokenize.draw_token_location())
-    e_type = tokenize.SyntaxType.TOKEN
-    end = tokenize.Syntax(e_value, e_location, e_type)
+    start = draw(draw_token_by_value("StartText"))
+    end = draw(draw_token_by_value("EndText"))
     all_tokens = [start] + tokens + [end]
     text_value = value.strip("\n\t ")
-    result = tokenize.Syntax(text_value, s_location, tokenize.SyntaxType.TEXT)
+    result = tokenize.Syntax(text_value, start.location, tokenize.SyntaxType.TEXT)
     return (all_tokens, result)
 
 
@@ -139,11 +138,7 @@
 @composite
 def draw_syntax_text_invalid_extrastarttext(draw):
     (tokens, _) = draw(draw_syntax_text_valid())
-    s_value = draw(test_tokenize.draw_token_keyword())
-    s_value = "StartText"
-    s_location = draw(test_tokenize.draw_token_location())
-    s_type = tokenize.SyntaxType.TOKEN
-    start = tokenize.Syntax(s_value, s_location, s_type)
+    start = draw(draw_token_by_value("StartText"))
     pos = draw(integers(min_value=1, max_value=(len(tokens) - 1)))
     new_tokens = tokens[0:pos] + [start] + tokens[pos:]
     return new_tokens