diff --git a/src/tokenize.py b/src/tokenize.py
index 39684c0..c76378f 100644
--- a/src/tokenize.py
+++ b/src/tokenize.py
@@ -114,3 +114,11 @@
         new = Token(t.value, t.location, type)
         new_tokens.append(new)
     return new_tokens
+
+
+# Tokenizes source code
+def tokenize(source, filename):
+    split = split_tokens(source)
+    located = locate_tokens(split, filename)
+    classified = classify_tokens(located)
+    return classified
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 52b46e5..b99125e 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -192,3 +192,25 @@
 def test_tokenize_classification(test_data):
     (input, tokens) = test_data
     assert tokenize.classify_tokens(input) == tokens
+
+
+@composite
+def draw_source_fuzz(draw):
+    strategies = [
+        draw_token_nonwhitespace(),
+        draw_token_whitespace(),
+    ]
+    tokens = draw(lists(one_of(strategies)))
+    input = ""
+    for t in tokens:
+        input += t.value
+    return input
+
+
+@given(draw_source_fuzz(), text())
+def test_tokenize_fuzz(source, filename):
+    split = tokenize.split_tokens(source)
+    located = tokenize.locate_tokens(split, filename)
+    classified = tokenize.classify_tokens(located)
+    tokenized = tokenize.tokenize(source, filename)
+    assert classified == tokenized