diff --git a/src/tokenize.py b/src/tokenize.py
index 1f35428..62a045b 100644
--- a/src/tokenize.py
+++ b/src/tokenize.py
@@ -28,13 +28,9 @@
 )
 
 
-# Default locations for symbols
-default_location = SymbolLocation(1, 1, "default-location.txt")  # pragma: no mutate
-
-
 # Represents a tokenizer symbol
 class Symbol:
-    def __init__(self, value, location=default_location):
+    def __init__(self, value, location):
         self.value = value
         self.location = location
 
@@ -55,19 +51,20 @@
     symbols = []
     current = input[0]
     curr_whitespace = is_whitespace(input[0])
+    location = SymbolLocation(1, 1, "")
     for c in input[1:]:
         c_whitespace = is_whitespace(c)
         if c_whitespace != curr_whitespace:
             # Flush current buffer and switch modes
-            symbols.append(Symbol(current))
+            symbols.append(Symbol(current, location))
             current = c
             curr_whitespace = c_whitespace
         elif curr_whitespace:
             # Whitespace mode appends each character
-            symbols.append(Symbol(current))
+            symbols.append(Symbol(current, location))
             current = c
         else:
             # Symbol mode builds the current buffer
             current += c
-    symbols.append(Symbol(current))
+    symbols.append(Symbol(current, location))
     return symbols
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index f63258c..807d7c1 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -52,13 +52,16 @@
 def draw_symbol_nonwhitespace(draw):
     chars = characters(blacklist_characters=whitespace)
     value = draw(text(alphabet=chars, min_size=1))
-    return tokenize.Symbol(value)
+    location = tokenize.SymbolLocation(1, 1, "")
+    return tokenize.Symbol(value, location)
 
 
 # Draws a tokenizer whitespace symbol
 @composite
 def draw_symbol_whitespace(draw):
-    return tokenize.Symbol(draw(sampled_from(whitespace)))
+    value = draw(sampled_from(whitespace))
+    location = tokenize.SymbolLocation(1, 1, "")
+    return tokenize.Symbol(value, location)
 
 
 # Generates an alternating sequence of symbols
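
For reference, a minimal usage sketch of the API after this change: with the `default_location` fallback removed, callers must construct a `SymbolLocation` explicitly and pass it to `Symbol`. The import name follows the tests above; the filename and symbol value below are illustrative assumptions, not part of the diff.

    # Hypothetical usage sketch: `location` is now a required argument.
    # Assumes the module is importable as `tokenize` (as the tests do) and
    # that SymbolLocation takes (line, column, filename), per the diff above.
    import tokenize

    location = tokenize.SymbolLocation(1, 1, "example.txt")  # illustrative filename
    symbol = tokenize.Symbol("foo", location)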