diff --git a/src/tokenize.py b/src/tokenize.py
index 8286a0f..cb207d6 100644
--- a/src/tokenize.py
+++ b/src/tokenize.py
@@ -48,6 +48,7 @@
     NEWLINE = enum.auto() # pragma: no mutate
     BOOL = enum.auto() # pragma: no mutate
     KEYWORD = enum.auto() # pragma: no mutate
+    SHEBANG = enum.auto() # pragma: no mutate
 
 
 # Represents a tokenizer token
@@ -129,6 +130,8 @@
             type = TokenType.BOOL
         elif t.value in keywords:
             type = TokenType.KEYWORD
+        elif t.value[0:2] == "#!":
+            type = TokenType.SHEBANG
         else:
             type = TokenType.UNKNOWN
         new = Token(t.value, t.location, type)
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index c7976d1..6639f9a 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -68,6 +68,7 @@
     value = draw(text(alphabet=chars, min_size=1))
     assume(value not in ["True", "False"])
     assume(value not in keywords)
+    assume(value[0:2] != "#!")
     type = tokenize.TokenType.UNKNOWN
     return tokenize.Token(value, token.location, type)
 
@@ -112,6 +113,15 @@
     return tokenize.Token(value, token.location, type)
 
 
+# Draws a shebang token
+@composite
+def draw_token_shebang(draw):
+    token = draw(draw_token_random())
+    value = "#!" + draw(text())
+    type = tokenize.TokenType.SHEBANG
+    return tokenize.Token(value, token.location, type)
+
+
 # Draws a classified token
 @composite
 def draw_token_classified(draw):
@@ -121,6 +131,7 @@
         draw_token_newline(),
         draw_token_bool(),
         draw_token_keyword(),
+        draw_token_shebang(),
     ]
     token = draw(one_of(strategies))
     return token