# SPDX-License-Identifier: LGPL-2.1-only
# Copyright 2022 Jookia <contact@jookia.org>

from hypothesis import given, assume
from hypothesis.strategies import (
    booleans,
    composite,
    integers,
    lists,
    one_of,
    sampled_from,
    text,
)

from src.token import Token, TokenLocation, TokenStream
from tests.templates import template_test_structure

# Keywords recognized by the language
keywords = [
    "Done",
    "Set",
    "To",
    "EndSet",
    "If",
    "Then",
    "Else",
    "EndIf",
    "StartNote",
    "EndNote",
    "StartText",
    "EndText",
]

# Literals recognized by the language
literals = [
    "True",
    "False",
]


# Draws a random token location
@composite
def draw_token_location(draw):
    line = draw(integers())
    offset = draw(integers())
    filename = draw(text())
    return TokenLocation(line, offset, filename)


# Test token location structure
@template_test_structure(
    TokenLocation,
    draw_token_location(),
    line=integers(),
    offset=integers(),
    file=text(),
)
def test_token_location_structure():
    pass


# Draws a token with a specific value but random location
@composite
def draw_token_by_value(draw, value):
    location = draw(draw_token_location())
    return Token(value, location)


# Draws an unknown token
@composite
def draw_token_unknown(draw):
    location = draw(draw_token_location())
    value = draw(text(min_size=1))
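    # Discard any draw that collides with a keyword or literal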
    assume(value not in literals)
    assume(value not in keywords)
    return Token(value, location)


# Draws a bool token
@composite
def draw_token_bool(draw):
    location = draw(draw_token_location())
    if draw(booleans()):
        value = "True"
    else:
        value = "False"
    return Token(value, location)


# Draws a keyword token
@composite
def draw_token_keyword(draw):
    location = draw(draw_token_location())
    value = draw(sampled_from(keywords))
    return Token(value, location)


# All strategies used to generate tokens
all_strategies = [
    draw_token_unknown(),
    draw_token_bool(),
    draw_token_keyword(),
]


# Draws a token and possibly adds garbage to its value
# This is to ensure that tokens must completely match a value
@composite
def draw_token_garbled(draw):
    token = draw(one_of(all_strategies))
    value = token.value
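    # Randomly prepend and/or append non-empty garbage text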
    if draw(booleans()):
        value = draw(text(min_size=1)) + value
    if draw(booleans()):
        value = value + draw(text(min_size=1))
    return Token(value, token.location)


# Draws a random token
@composite
def draw_token_random(draw):
    strategies = all_strategies + [draw_token_garbled()]
    token = draw(one_of(strategies))
    return token


# Test token structure
@template_test_structure(
    Token,
    draw_token_random(),
    value=text(),
    location=draw_token_location(),
)
def test_token_token_structure():
    pass


# Tests that a token stream pops items correctly
# We expect the following behaviour:
# - All items are popped in order
# - None is returned at the end of the stream
@given(lists(draw_token_random()))
def test_token_token_stream_pop(tokens):
    stream = TokenStream(tokens.copy())
    read = []
    token = stream.pop()
    while token is not None:
        read.append(token)
        token = stream.pop()
    assert read == tokens
    assert stream.pop() is None


# Tests that a token stream peeks items correctly
# We expect the following behaviour:
# - Peeking does not pop any values
# - None is returned at the end of the stream
@given(lists(draw_token_random()), integers(min_value=0, max_value=100))
def test_token_token_stream_peek(tokens, times):
    stream = TokenStream(tokens.copy())
    token_count = len(stream.tokens)
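    # An empty stream always peeks None; otherwise every peek
    # returns the first token without consuming it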
    if token_count == 0:
        real_times = times
        expected = None
    else:
        real_times = times % token_count
        expected = tokens[0]
    for _ in range(0, real_times):
        token = stream.peek()
        assert token == expected


# Tests that peeking and popping don't influence each other
# We expect the following behaviour:
# - Peeking does not influence the next pop call
# - Popping does not influence the next peek call
@given(lists(draw_token_random()))
def test_token_token_stream_mixed(tokens):
    stream = TokenStream(tokens.copy())
    read = []
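    # Use a sentinel so the loop body runs at least once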
    token = True
    while token is not None:
        peeked = stream.peek()
        token = stream.pop()
        read.append(token)
        assert peeked == token
    assert read[:-1] == tokens  # Skip None at end
    assert stream.pop() is None