diff --git a/parse.py b/parse.py
index b6b2981..96ef191 100644
--- a/parse.py
+++ b/parse.py
@@ -5,12 +5,13 @@
 import ast_types
 
 class ParseLocation:
-    def __init__(self, line, column):
+    def __init__(self, line, column, file):
         self.line = line
         self.column = column
+        self.file = file
 
     def __repr__(self):
-        return "ParseLocation(line %i, column %i)" % (self.line, self.column)
+        return "ParseLocation(line %i, column %i, file '%s')" % (self.line, self.column, self.file)
 
 class ParseContext:
     def __init__(self, parent, context, location):
@@ -42,11 +43,12 @@
     return symbol == " " or symbol == "\t" or symbol == "\n"
 
 class Tokenizer:
-    def __init__(self, input):
+    def __init__(self, input, filename):
         self.code = input
         self.pos = 0
         self.pos_line = 1
         self.pos_column = 1
+        self.filename = filename
 
     def next(self):
         if self.pos >= len(self.code):
@@ -84,7 +86,7 @@
 
     def skip_note(self, line, column):
         log.log(log.LEXER, log.TRACE, "Skipping tokens until EndNote")
-        context = ParseContext(None, "reading note", ParseLocation(line, column))
+        context = ParseContext(None, "reading note", ParseLocation(line, column, self.filename))
         (token, _, _) = self.read_token()
         while token and token != "EndNote":
             (token, _, _) = self.read_token()
@@ -93,7 +95,7 @@
 
     def read_text(self, line, column):
         log.log(log.LEXER, log.TRACE, "Reading characters until EndText")
-        context = ParseContext(None, "reading text", ParseLocation(line, column))
+        context = ParseContext(None, "reading text", ParseLocation(line, column, self.filename))
         start = self.pos
         (token, _, _) = self.read_token()
         while token and token != "EndText":
@@ -130,12 +132,12 @@
             else:
                 type = "symbol"
                 value = token
-            tok = Token(type, value, ParseLocation(line, column))
+            tok = Token(type, value, ParseLocation(line, column, self.filename))
             log.log(log.LEXER, log.DEBUG, "Appending %s" % (tok))
             tokens.append(tok)
             (token, line, column) = self.read_token()
         log.log(log.LEXER, log.TRACE, "Done tokenizing, adding EOF")
-        tokens.append(Token("EOF", None, ParseLocation(self.pos_line, self.pos_column)))
+        tokens.append(Token("EOF", None, ParseLocation(self.pos_line, self.pos_column, self.filename)))
         log.log(log.LEXER, log.DEBUG, "Tokens are %s" % (tokens))
         return tokens
 
@@ -303,7 +305,7 @@
 def parse_file(filename):
     code = open(filename).read()
     try:
-        tokenizer = Tokenizer(code)
+        tokens = tokenizer.tokenize()
+        tokenizer = Tokenizer(code, filename)
         tokens = tokenizer.tokenize()
         parser = Parser(tokens)
         return parser.parse_file()
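
For illustration, a minimal standalone sketch of what the patch buys: ParseLocation now records which file a token came from, so diagnostics stay unambiguous when more than one file is parsed in a run. The class body is copied from the patched parse.py; the filename "demo.src" and the line/column values are made up for the example.

# Standalone sketch of the patched ParseLocation; "demo.src" is a
# hypothetical filename used only for this illustration.
class ParseLocation:
    def __init__(self, line, column, file):
        self.line = line
        self.column = column
        self.file = file

    def __repr__(self):
        return "ParseLocation(line %i, column %i, file '%s')" % (self.line, self.column, self.file)

loc = ParseLocation(3, 14, "demo.src")
print(loc)  # -> ParseLocation(line 3, column 14, file 'demo.src')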