diff --git a/tests/test_token.py b/tests/test_token.py
index 01c4b70..e7887c9 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -47,6 +47,11 @@
     return TokenLocation(line, offset, filename)
 
 
+# Static token location
+def static_token_location():
+    return TokenLocation(1234, 4321, "Hello world")
+
+
 # Test token location structure
 @template_test_structure(
     TokenLocation,
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 31f589c..20cb4c7 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -15,7 +15,7 @@
 from src import tokenize
 from src.token import Token, TokenLocation
 
-from tests.test_token import draw_token_location
+from tests.test_token import static_token_location
 
 
 # Values considered spaces
@@ -47,7 +47,7 @@
 # Draws a space token
 @composite
 def draw_token_space(draw):
-    location = draw(draw_token_location())
+    location = static_token_location()
     value = draw(sampled_from(valid_spaces))
     return Token(value, location)
 
@@ -55,7 +55,7 @@
 # Draws a new line token
 @composite
 def draw_token_newline(draw):
-    location = draw(draw_token_location())
+    location = static_token_location()
     value = draw(sampled_from(valid_newlines))
     return Token(value, location)
 
@@ -64,7 +64,7 @@
 @composite
 def draw_token_nospace(draw):
     reserved = valid_spaces + single_newlines
-    location = draw(draw_token_location())
+    location = static_token_location()
     chars = characters(blacklist_characters=reserved)
     value = draw(text(alphabet=chars, min_size=1))
     for v in multi_newlines:
@@ -126,8 +126,7 @@
             spaces = draw(lists(locationed, min_size=1))
             tokens += merge_crlf(spaces)
         else:
-            strategy = draw_token_nospace
-            locationed = draw_token_splitted(strategy())
+            locationed = draw_token_splitted(draw_token_nospace())
             tokens.append(draw(locationed))
         drawing_whitespace = not drawing_whitespace
     for t in tokens: