# NewLang / parse.py
# SPDX-License-Identifier: MIT
# Copyright 2021 Jookia <contact@jookia.org>

import log
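
# Note: 'log' is NewLang's project-local logging helper (it provides log.log,
# log.LEXER, log.PARSER and the TRACE/DEBUG levels used below), not the
# standard library logging module.
#
# Surface syntax accepted by this file, reconstructed from the keyword list
# and the parse_* methods below (a hedged sketch, not an authoritative
# grammar):
#
#   #!optional-shebang-line
#   NewLang 0
#   BeginNote free-form comment text EndNote
#   Set name To subject verb arguments... EndSet
#   If subject verb args Then subject verb args Else subject verb args EndIf
#   subject verb arguments... Done
#
# Tokens are separated by spaces, tabs or newlines, and BeginText ... EndText
# wraps a run of literal text.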

class Token:
    def __init__(self, type, value, line, column):
        self.type = type
        self.value = value
        self.line = line
        self.column = column

    def __repr__(self):
        return "Token(type %s, value '%s', line %i column %i)" % (self.type, self.value, self.line, self.column)

def is_whitespace(symbol):
    # Only spaces, tabs and newlines separate tokens; '\r' is not included,
    # so CRLF line endings would leak carriage returns into tokens.
    return symbol == " " or symbol == "\t" or symbol == "\n"

class Tokenizer:
    def __init__(self, input):
        self.code = input
        self.pos = 0
        self.pos_line = 1
        self.pos_column = 1

    def next(self):
        if self.pos >= len(self.code):
            log.log(log.LEXER, log.TRACE, "Reached end of file")
            return None
        else:
            symbol = self.code[self.pos]
            pos_string = "line %i column %i" % (self.pos_line, self.pos_column)
            if symbol == '\n':
                log.log(log.LEXER, log.TRACE, "Read new line at %s" % (pos_string))
                self.pos_line += 1
                self.pos_column = 1
            else:
                log.log(log.LEXER, log.TRACE, "Read character %s at %s" % (symbol, pos_string))
                self.pos_column += 1
            self.pos += 1
            return symbol

    def read_token(self):
        token = ""
        symbol = self.next()
        while is_whitespace(symbol):
            log.log(log.LEXER, log.TRACE, "Skipping whitespace")
            symbol = self.next()
        if not symbol:
            log.log(log.LEXER, log.TRACE, "No token to read")
            return (None, self.pos_line, self.pos_column)
        line = self.pos_line
        column = self.pos_column - 1 # pos_column is one ahead
        # Stop at whitespace, and also at end of file (symbol is None) so the
        # += below never tries to append None.
        while symbol and not is_whitespace(symbol):
            token += symbol
            symbol = self.next()
        log.log(log.LEXER, log.TRACE, "Read token '%s' at line %i column %i" % (token, line, column))
        return (token, line, column)

    def skip_note(self):
        log.log(log.LEXER, log.TRACE, "Skipping tokens until EndNote")
        (token, _, _) = self.read_token()
        while token and token != "EndNote":
            (token, _, _) = self.read_token()

    def read_text(self):
        log.log(log.LEXER, log.TRACE, "Reading characters until EndText")
        start = self.pos
        (token, _, _) = self.read_token()
        while token and token != "EndText":
            (token, _, _) = self.read_token()
        if not token:
            # Hit end of file before EndText: return everything that was left.
            return self.code[start:self.pos]
        else:
            # Trim the trailing " EndText" plus the single whitespace
            # character read_token consumed after it.
            return self.code[start:self.pos - len(" EndText") - 1]

    def skip_shebang(self):
        log.log(log.LEXER, log.TRACE, "Skipping shebang")
        # Drop the first line (e.g. a hypothetical '#!/usr/bin/env newlang')
        # so it is never tokenized, then resume counting from line 2.
        next_line = self.code.find('\n') + 1
        self.code = self.code[next_line:]
        self.pos_line = 2

    def tokenize(self):
        if self.code[0:2] == '#!':
            self.skip_shebang()
        keywords = ["NewLang", "Done", "Set", "To", "EndSet",
                    "If", "Then", "Else", "EndIf"]
        tokens = []
        (token, line, column) = self.read_token()
        while token:
            if token == "BeginNote":
                self.skip_note()
                (token, line, column) = self.read_token()
                continue
            elif token == "BeginText":
                type = "text"
                value = self.read_text()
            elif token in keywords:
                type = "keyword"
                value = token
            else:
                type = "symbol"
                value = token
            tok = Token(type, value, line, column)
            log.log(log.LEXER, log.DEBUG, "Appending %s" % (tok))
            tokens.append(tok)
            (token, line, column) = self.read_token()
        log.log(log.LEXER, log.TRACE, "Done tokenizing, adding EOF")
        tokens.append(Token("EOF", None, self.pos_line, self.pos_column))
        log.log(log.LEXER, log.DEBUG, "Tokens are %s" % (tokens))
        return tokens
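
# Example usage (a hedged sketch; the sample program text is hypothetical and
# the trace output depends on the project's 'log' module):
#
#   tokens = Tokenizer("NewLang 0\nGreet Say Hello Done").tokenize()
#
# yields keyword tokens for 'NewLang' and 'Done', symbol tokens for '0',
# 'Greet', 'Say' and 'Hello', and a trailing EOF token.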

class Reference:
    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "Reference('%s')" % (self.value)

class Text:
    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "Text('%s')" % (self.value)

class Statement:
    def __init__(self, subject, verb, arguments):
        self.subject = subject
        self.verb = verb
        self.arguments = arguments

    def __repr__(self):
        return "Statement(subject %s, verb '%s', arguments %s)" % (self.subject, self.verb, self.arguments)

class Set:
    def __init__(self, subject, statement):
        self.subject = subject
        self.statement = statement

    def __repr__(self):
        return "Set(subject %s, statement %s)" % (self.subject, self.statement)

class Conditional:
    def __init__(self, test, success, failure):
        self.test = test
        self.success = success
        self.failure = failure

    def __repr__(self):
        return "Conditional(test %s, success %s, failure %s)" % (self.test, self.success, self.failure)
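
# How source constructs map to these nodes, as implemented by the Parser
# below: Reference names a value by symbol, Text holds a BeginText ...
# EndText literal, Statement is "subject [verb arguments...]", Set binds a
# statement's result to a name, and Conditional carries the If/Then/Else
# branches.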

class ParseContext:
    def __init__(self, parent, context, token):
        self.parent = parent
        self.context = context
        self.line = token.line
        self.column = token.column

    def __repr__(self):
        return "ParseContext(parent %s, context '%s', line %i, column %i)" % (self.parent, self.context, self.line, self.column)

# Derive from Exception rather than BaseException so generic exception
# handlers can catch parse errors.
class ParseError(Exception):
    def __init__(self, context, error):
        self.context = context
        self.error = error

    def __repr__(self):
        return "ParseError(context %s, error '%s')" % (self.context, self.error)

class Parser:
    def __init__(self, tokens):
        self.tokens = tokens
        self.pos = 0

    def next(self):
        token = self.tokens[self.pos]
        # Never advance past the final token (EOF), so next() stays safe to
        # call even once the stream is exhausted.
        if self.pos < (len(self.tokens) - 1):
            self.pos += 1
        log.log(log.PARSER, log.TRACE, "Read %s" % (token))
        return token

    def peek(self):
        token = self.tokens[self.pos]
        log.log(log.PARSER, log.TRACE, "Peeked %s" % (token))
        return token

    def eof(self):
        return self.tokens[self.pos].type == "EOF"

    def create_context(self, context, text):
        return ParseContext(context, text, self.tokens[self.pos])

    def create_error(self, context, text):
        return ParseError(context, text)

    def parse_version(self, context):
        log.log(log.PARSER, log.TRACE, "Parsing version identifier...")
        context = self.create_context(context, "parsing version identifier")
        token = self.next()
        if token.type != "keyword" or token.value != "NewLang":
            raise self.create_error(context, "Expected 'NewLang' keyword, got '%s'" % (token.value))
        token = self.next()
        version = token.value
        if version != "0":
            raise self.create_error(context, "Unknown version '%s'" % (version))
        log.log(log.PARSER, log.DEBUG, "Parsed version %s" % (version))
        return version

    def parse_value(self, context, subject, type, value):
        # Note: 'subject' describes what is being parsed (e.g. "argument");
        # it is currently unused here.
        log.log(log.PARSER, log.TRACE, "Parsing value...")
        if type == "symbol":
            ret = Reference(value)
        elif type == "text":
            ret = Text(value)
        else:
            raise self.create_error(context, "Unexpected token type %s" % (type))
        log.log(log.PARSER, log.TRACE, "Parsed value, AST is %s" % (ret))
        return ret

    def parse_arguments(self, context, terminator):
        log.log(log.PARSER, log.TRACE, "Parsing arguments until '%s'..." % (terminator))
        context = self.create_context(context, "parsing statement arguments")
        args = []
        arg_num = 1
        while True:
            log.log(log.PARSER, log.TRACE, "Parsing argument %i..." % (arg_num))
            arg_context = self.create_context(context, "parsing argument %i" % (arg_num))
            token = self.next()
            arg_num += 1
            if token.type == "keyword":
                if token.value == terminator:
                    log.log(log.PARSER, log.TRACE, "Parsed arguments, AST is %s" % (args))
                    return args
                else:
                    raise self.create_error(context, "Expected %s, got %s" % (terminator, token.value))
            else:
                arg = self.parse_value(arg_context, "argument", token.type, token.value)
                log.log(log.PARSER, log.TRACE, "Parsed argument %s" % (arg))
                args.append(arg)

    def parse_statement(self, context, terminator, type):
        log.log(log.PARSER, log.TRACE, "Parsing %s statement until '%s'..." % (type, terminator))
        meta_context = self.create_context(context, "parsing %s statement" % (type))
        log.log(log.PARSER, log.TRACE, "Parsing statement subject")
        context = self.create_context(meta_context, "parsing subject")
        token = self.next()
        subject = self.parse_value(context, "subject", token.type, token.value)
        log.log(log.PARSER, log.TRACE, "Parsing statement verb...")
        context = self.create_context(meta_context, "parsing statement verb")
        token = self.next()
        if token.type == "keyword":
            if token.value == terminator:
                verb = None
            else:
                raise self.create_error(meta_context, "Expected %s, got %s" % (terminator, token.value))
        elif token.type == "symbol":
            verb = token.value
        else:
            raise self.create_error(context, "Expected symbol, got %s" % (token.type))
        log.log(log.PARSER, log.TRACE, "Parsing statement arguments...")
        if verb:
            arguments = self.parse_arguments(meta_context, terminator)
        else:
            arguments = []
        statement = Statement(subject, verb, arguments)
        log.log(log.PARSER, log.DEBUG, "Parsed statement, AST is %s" % (statement))
        return statement
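
    # For example, the hypothetical statement "list add 1 2 Done" should
    # parse to Statement(Reference('list'), 'add', [Reference('1'),
    # Reference('2')]).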

    def parse_set(self, context):
        log.log(log.PARSER, log.TRACE, "Parsing set subject...")
        meta_context = self.create_context(context, "parsing set directive")
        self.next() # Skip 'Set'
        context = self.create_context(meta_context, "parsing subject")
        token = self.next()
        if token.type != "symbol":
            raise self.create_error(context, "Expected symbol, got %s" % (token.type))
        subject = token.value
        context = self.create_context(meta_context, "parsing set separator")
        token = self.next()
        if token.type != "keyword" or token.value != "To":
            pretty_value = token.value
            if token.type != "keyword":
                pretty_value = "'%s'" % (pretty_value)
            raise self.create_error(context, "Expected To, got %s" % (pretty_value))
        log.log(log.PARSER, log.TRACE, "Parsing set value...")
        ast = self.parse_statement(meta_context, "EndSet", "set value")
        directive = Set(subject, ast)
        log.log(log.PARSER, log.DEBUG, "Parsed set, AST is %s" % (directive))
        return directive
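
    # For example, the hypothetical directive "Set x To list pop EndSet"
    # should parse to Set('x', Statement(Reference('list'), 'pop', [])).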

    def parse_if(self, context):
        log.log(log.PARSER, log.TRACE, "Parsing if test condition...")
        context = self.create_context(context, "parsing if directive")
        self.next() # Skip 'If'
        test = self.parse_statement(context, "Then", "test condition")
        log.log(log.PARSER, log.TRACE, "Parsing if success statement...")
        success = self.parse_statement(context, "Else", "success")
        log.log(log.PARSER, log.TRACE, "Parsing if failure statement...")
        failure = self.parse_statement(context, "EndIf", "failure")
        conditional = Conditional(test, success, failure)
        log.log(log.PARSER, log.DEBUG, "Parsed if, AST is %s" % (conditional))
        return conditional
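
    # For example, the hypothetical directive
    # "If x isEqual y Then out print yes Else out print no EndIf" should
    # parse to a Conditional whose test is Statement(Reference('x'),
    # 'isEqual', [Reference('y')]) and whose success and failure branches are
    # the two 'out print ...' statements.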

    def parse_directive(self, context):
        token = self.peek()
        if token.type != "keyword" and token.type != "symbol":
            raise self.create_error(context, "Expected keyword or symbol, got %s" % (token.type))
        if token.type == "keyword":
            if token.value == "Set":
                return self.parse_set(context)
            elif token.value == "If":
                return self.parse_if(context)
            else:
                raise self.create_error(context, "Unexpected keyword %s" % (token.value))
        else:
            ast = self.parse_statement(context, "Done", "command")
            return ast

    def parse_file(self):
        log.log(log.PARSER, log.TRACE, "Parsing file...")
        ast = []
        version = self.parse_version(None)
        while not self.eof():
            log.log(log.PARSER, log.TRACE, "Parsing next directive in file...")
            ast.append(self.parse_directive(None))
        log.log(log.PARSER, log.DEBUG, "Parsed file, AST is %s" % (ast))
        return ast

def parse_file(filename):
    # Use a context manager so the file handle is closed promptly.
    with open(filename) as file:
        code = file.read()
    tokenizer = Tokenizer(code)
    tokens = tokenizer.tokenize()
    try:
        parser = Parser(tokens)
        return parser.parse_file()
    except ParseError as e:
        print("Parse error: %s" % (e.error))
        context = e.context
        while context:
            print("While %s at line %i column %i" % (context.context, context.line, context.column))
            context = context.parent
        print("While parsing file %s" % (filename))
        return None
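
# Minimal command-line driver, a hedged sketch: the NewLang project may ship
# its own entry point, but this shows how parse_file() above is intended to
# be called.
if __name__ == "__main__":
    import sys
    if len(sys.argv) != 2:
        print("Usage: parse.py FILENAME")
        sys.exit(1)
    # parse_file() returns None and prints a context trace on parse errors.
    if parse_file(sys.argv[1]) is None:
        sys.exit(1)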