Files
blackroad/roadc/tests/test_lexer.py
Alexa Amundson 78fbe80f2a Initial monorepo — everything BlackRoad in one place
bin/       230 CLI tools (ask-*, br-*, agent-*, roadid, carpool)
scripts/   99 automation scripts
fleet/     Node configs and deployment
workers/   Cloudflare Worker sources (roadpay, road-search, squad webhooks)
roadc/     RoadC programming language
roadnet/   Mesh network (5 APs, WireGuard)
operator/  Memory system scripts
config/    System configs
dotfiles/  Shell configs
docs/      Documentation

BlackRoad OS — Pave Tomorrow.

RoadChain-SHA2048: d1a24f55318d338b
RoadChain-Identity: alexa@sovereign
RoadChain-Full: d1a24f55318d338b24b60bad7be39286379c76ae5470817482100cb0ddbbcb97e147d07ac7243da0a9f0363e4e5c833d612b9c0df3a3cd20802465420278ef74875a5b77f55af6fe42a931b8b635b3d0d0b6bde9abf33dc42eea52bc03c951406d8cbe49f1a3d29b26a94dade05e9477f34a7d4d4c6ec4005c3c2ac54e73a68440c512c8e83fd9b1fe234750b898ef8f4032c23db173961fe225e67a0432b5293a9714f76c5c57ed5fdf35b9fb40fd73c03ebf88b7253c6a0575f5afb6a6b49b3bda310602fb1ef676859962dad2aebbb2875814b30eee0a8ba195e482d4cbc91d8819e7f38f6db53e8063401649c77bb994371473cabfb917fb53e8cbe73d60
2026-03-14 17:08:41 -05:00

87 lines
2.8 KiB
Python

"""Tests for the RoadC lexer."""
import pytest
from lexer import Lexer, TokenType
class TestLiterals:
    """Lexing of literal forms: integers, floats, strings, booleans, colors."""

    @staticmethod
    def _contains(source, ttype, value=None):
        """Return True if lexing *source* yields a token of *ttype* (and *value*, if given)."""
        for tok in Lexer(source).tokenize():
            if tok.type == ttype and (value is None or tok.value == value):
                return True
        return False

    def test_integer(self):
        assert self._contains("42", TokenType.INTEGER, 42)

    def test_float(self):
        assert self._contains("3.14", TokenType.FLOAT, 3.14)

    def test_string_double_quotes(self):
        assert self._contains('"hello"', TokenType.STRING)

    def test_string_single_quotes(self):
        assert self._contains("'hello'", TokenType.STRING)

    def test_boolean_true(self):
        assert self._contains("true", TokenType.BOOLEAN, True)

    def test_boolean_false(self):
        assert self._contains("false", TokenType.BOOLEAN, False)

    def test_color_literal(self):
        assert self._contains("#FF1D6C", TokenType.COLOR)
class TestKeywords:
    """Reserved words must lex to their dedicated token types; everything else is an identifier."""

    @pytest.mark.parametrize("keyword,token_type", [
        ("let", TokenType.LET),
        ("var", TokenType.VAR),
        ("const", TokenType.CONST),
        ("fun", TokenType.FUN),
        ("if", TokenType.IF),
        ("elif", TokenType.ELIF),
        ("else", TokenType.ELSE),
        ("while", TokenType.WHILE),
        ("for", TokenType.FOR),
        ("return", TokenType.RETURN),
        ("match", TokenType.MATCH),
    ])
    def test_keyword_recognized(self, keyword, token_type):
        emitted = [tok.type for tok in Lexer(keyword).tokenize()]
        assert token_type in emitted

    def test_identifier(self):
        # A non-reserved word should come through as IDENTIFIER with its text as value.
        matches = [
            tok for tok in Lexer("myVar").tokenize()
            if tok.type == TokenType.IDENTIFIER and tok.value == "myVar"
        ]
        assert matches
class TestOperators:
    """Operator tokenization between two identifier operands."""

    @pytest.mark.parametrize("op", ["+", "-", "*", "/", "==", "!=", "<", ">", "<=", ">="])
    def test_operator_tokenized(self, op):
        """Each operator must lex without error and leave both operands intact.

        The previous assertion only checked that *some* IDENTIFIER token
        existed, which would still pass if the operator swallowed an operand
        or was silently dropped. Requiring both identifier values, in order,
        pins the lexer's behavior much more tightly.
        """
        tokens = Lexer(f"a {op} b").tokenize()
        idents = [t.value for t in tokens if t.type == TokenType.IDENTIFIER]
        assert idents == ["a", "b"]
class TestIndentation:
    """Layout-sensitive tokens emitted for significant whitespace."""

    def test_indent_dedent(self):
        emitted = {tok.type for tok in Lexer("if true:\n x = 1\n").tokenize()}
        assert TokenType.INDENT in emitted

    def test_colon_before_block(self):
        emitted = {tok.type for tok in Lexer("fun main():\n pass\n").tokenize()}
        assert TokenType.COLON in emitted
class TestComments:
    """Comment text must not leak into the token stream."""

    def test_comment_ignored(self):
        stream = Lexer("x = 1 # this is a comment").tokenize()
        identifier_values = {tok.value for tok in stream if tok.type == TokenType.IDENTIFIER}
        assert "comment" not in identifier_values