Add parsing for defers

This commit is contained in:
germax26 2024-08-11 12:59:19 +10:00
parent 659c79373d
commit 2cee127d8f
Signed by: germax26
SSH Key Fingerprint: SHA256:N3w+8798IMWBt7SYH8G1C0iJlIa2HIIcRCXwILT5FvM
4 changed files with 24 additions and 14 deletions

View File

@@ -543,3 +543,7 @@ class Import(Statement):
class TypeDefinition(Statement):
name: str
expression: TypeExpression
@dataclass
class DeferStatement(Statement):
statement: Statement

View File

@@ -656,6 +656,8 @@ def interpret_statements(statements: List_[Statement], program: ProgramState) ->
program.modules[file.str] = module
case TypeDefinition(name, expression_):
program.declare_and_assign_variable(name, TypeObject(calculate_type_expression(expression_, program)))
case DeferStatement(statement=statement):
assert False, "TODO: Defers are not implemented"
case _:
assert False, ("Unimplemented", statement)
return NothingResult()

View File

@@ -358,6 +358,9 @@ def parse_statement(lexer: Lexer) -> Statement:
type_expression = parse_type(lexer)
lexer.assert_token(SymbolToken(Symbol.Semicolon))
return TypeDefinition(name, type_expression)
elif lexer.take_token(KeywordToken(Keyword.Defer)):
statement = parse_statement(lexer)
return DeferStatement(statement)
elif lexer.check_tokenkind(KeywordToken) and not lexer.check_tokens(KeywordToken(Keyword.Return), KeywordToken(Keyword.Lambda)):
assert False, ("Unimplemented", lexer.next_token(), lexer.next_token(), lexer.next_token())
elif lexer.take_token(SymbolToken(Symbol.OpenCurly)):

View File

@@ -23,6 +23,7 @@ class Keyword(Enum):
Lambda = 'lambda'
Import = 'import'
Type = 'type'
Defer = 'defer'
class Symbol(Enum):
Open = '('