diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 6639f9a..594fe99 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -242,10 +242,27 @@
     assert tokenize.locate_tokens(input, filename) == located
 
 
+# Draws a classified token, optionally prepending and/or appending garbage text;
+@composite
+def draw_token_classified_garbled(draw):
+    token = draw(draw_token_classified())
+    value = token.value
+    type = token.type
+    if draw(booleans()):
+        value = draw(text(min_size=1)) + value
+        type = tokenize.TokenType.UNKNOWN
+    if draw(booleans()):
+        value = value + draw(text(min_size=1))
+        type = tokenize.TokenType.UNKNOWN
+    if value[0:2] == "#!":
+        type = tokenize.TokenType.SHEBANG
+    return tokenize.Token(value, token.location, type)
+
+
 # Generates a list of classified tokens and incorrectly classified tokens
 @composite
 def draw_tokens_classified(draw):
-    tokens = draw(lists(draw_token_classified()))
+    tokens = draw(lists(draw_token_classified_garbled()))
     input = []
     for t in tokens:
         type = draw(draw_token_type())
@@ -260,7 +277,7 @@
     assert tokenize.classify_tokens(input) == tokens
 
 
-# Draw random source code for fuzzing
+# Draw random source code that might be invalid for fuzzing
 @composite
 def draw_source_fuzz(draw):
     tokens = draw(lists(draw_token_classified()))