diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py index 82f92f0..83d7b25 100644 --- a/tests/test_tokenize.py +++ b/tests/test_tokenize.py @@ -112,7 +112,7 @@ return tokenize.Token(token.value, location, type) -# Generates an alternating sequence of tokens +# Generates an alternating sequence of whitespace and non-whitespace tokens @composite def draw_tokens_list(draw): output = [] @@ -194,6 +194,7 @@ assert tokenize.classify_tokens(input) == tokens +# Draw random source code for fuzzing @composite def draw_source_fuzz(draw): strategies = [ @@ -207,6 +208,7 @@ return input +# Fuzz test: check that tokenizing randomly drawn source behaves as expected @given(draw_source_fuzz(), text()) def test_tokenize_fuzz(source, filename): split = tokenize.split_tokens(source)