Make structure instantiation StructType.{args...}
instead of StructType{args...}
commit e48d50f1e6
parent f3ed26f131
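In short (illustrative only; the exact argument syntax is not shown in this diff): an instantiation that was written `Point{ ... }` is now written `Point.{ ... }`. After an expression, `.` now introduces either a field access (`p.x`) when followed by an identifier, or a struct instantiation (`Point.{ ... }`) when followed by `{`; a bare `{` no longer starts an instantiation.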
@@ -130,26 +130,17 @@ def parse_primary(lexer: Lexer) -> Expression:
     else:
         base_expression = Variable(parse_identifier(lexer))

-    while (token := lexer.take_tokens(SymbolToken(Symbol.Open), SymbolToken(Symbol.OpenSquare), SymbolToken(Symbol.Dot), SymbolToken(Symbol.OpenCurly))):
+    while (token := lexer.take_tokens(SymbolToken(Symbol.Open), SymbolToken(Symbol.OpenSquare), SymbolToken(Symbol.Dot))):
         match token.contents:
             case SymbolToken(symbol):
                 match symbol:
                     case Symbol.Dot:
-                        field = parse_identifier(lexer)
+                        next_token = lexer.next_token()
+                        match next_token.contents:
+                            case IdentifierToken(identifier=field):
                                 base_expression = FieldAccess(base_expression, field)
-                    case Symbol.Open:
-                        if lexer.take_token(SymbolToken(Symbol.Close)):
-                            base_expression = FunctionCall(base_expression, [])
-                        else:
-                            arguments: List[Expression] = [parse_expression(lexer)]
-                            while lexer.take_token(SymbolToken(Symbol.Comma)):
-                                arguments.append(parse_expression(lexer))
-                            lexer.assert_token(SymbolToken(Symbol.Close))
-                            base_expression = FunctionCall(base_expression, arguments)
-                    case Symbol.OpenSquare:
-                        index = parse_expression(lexer)
-                        lexer.assert_token(SymbolToken(Symbol.CloseSquare))
-                        base_expression = ArrayAccess(base_expression, index)
+                            case SymbolToken(symbol=symbol):
+                                match symbol:
                                     case Symbol.OpenCurly:
                                         if lexer.take_token(SymbolToken(Symbol.CloseCurly)):
                                             base_expression = StructInstantiation(base_expression, [])
@@ -163,6 +154,23 @@ def parse_primary(lexer: Lexer) -> Expression:
                                             while lexer.take_token(SymbolToken(Symbol.Comma)): struct_arguments.append(parse_argument())
                                             lexer.assert_token(SymbolToken(Symbol.CloseCurly))
                                             base_expression = StructInstantiation(base_expression, struct_arguments)
+                                    case _:
+                                        raise SyntaxError(f"{next_token.loc}: Unexpected symbol: {repr(str(symbol))}")
+                            case _:
+                                raise SyntaxError(f"{next_token.loc}: Unexpected: {next_token.contents}")
+                    case Symbol.Open:
+                        if lexer.take_token(SymbolToken(Symbol.Close)):
+                            base_expression = FunctionCall(base_expression, [])
+                        else:
+                            arguments: List[Expression] = [parse_expression(lexer)]
+                            while lexer.take_token(SymbolToken(Symbol.Comma)):
+                                arguments.append(parse_expression(lexer))
+                            lexer.assert_token(SymbolToken(Symbol.Close))
+                            base_expression = FunctionCall(base_expression, arguments)
+                    case Symbol.OpenSquare:
+                        index = parse_expression(lexer)
+                        lexer.assert_token(SymbolToken(Symbol.CloseSquare))
+                        base_expression = ArrayAccess(base_expression, index)
                     case _: assert False, ("Unimplemented", symbol)
             case _: assert False, ("Unimplemented", token)

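The heart of the parse_primary change: after consuming `.`, the parser looks at the next token and dispatches — an identifier becomes a field access, `{` begins a struct instantiation, anything else is a syntax error. Below is a minimal, self-contained sketch of that dispatch, not the repository's code: the token and AST classes are simplified stand-ins for its Lexer/IdentifierToken/SymbolToken machinery.

# Minimal sketch of the ".field vs .{ }" dispatch with stand-in token/AST classes.
from dataclasses import dataclass
from typing import List, Union

@dataclass
class IdentTok:
    name: str

@dataclass
class SymTok:
    symbol: str  # "{", "}", ...

Token = Union[IdentTok, SymTok]

@dataclass
class FieldAccess:
    base: object
    field: str

@dataclass
class StructInstantiation:
    base: object
    arguments: List[object]

def parse_after_dot(base, tokens: List[Token], pos: int):
    """Parse whatever follows a '.' starting at tokens[pos]; return (node, new_pos)."""
    match tokens[pos]:
        case IdentTok(name=field):
            # identifier after '.'  ->  field access
            return FieldAccess(base, field), pos + 1
        case SymTok(symbol="{"):
            # '{' after '.'  ->  struct instantiation; arguments kept opaque here
            pos += 1
            arguments: List[object] = []
            while not (isinstance(tokens[pos], SymTok) and tokens[pos].symbol == "}"):
                arguments.append(tokens[pos])
                pos += 1
            return StructInstantiation(base, arguments), pos + 1
        case other:
            raise SyntaxError(f"Unexpected token after '.': {other}")

print(parse_after_dot("Point", [SymTok("{"), IdentTok("x"), SymTok("}")], 0)[0])
print(parse_after_dot("p", [IdentTok("x")], 0)[0])

Expected output: StructInstantiation(base='Point', arguments=[IdentTok(name='x')]) followed by FieldAccess(base='p', field='x').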
@@ -301,7 +309,6 @@ def parse_statement(lexer: Lexer) -> Statement:
         return DoWhileStatement(body, condition)
     elif lexer.take_token(KeywordToken(Keyword.Match)):
         value = parse_expression(lexer)
-        lexer.assert_token(KeywordToken(Keyword.In)) # to prevent it from parsing it as a struct instantiation
         lexer.assert_token(SymbolToken(Symbol.OpenCurly))

         cases: List[Tuple[Expression, Statement]] = []
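Side effect visible in parse_statement: since a `{` after an arbitrary expression can no longer be read as a struct instantiation, the `in` keyword that previously separated the match scrutinee from its `{` block (and the assert enforcing it) is no longer needed — presumably `match value { ... }` now parses unambiguously.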
@@ -62,6 +62,8 @@ class Symbol(Enum):
     Tilde = '~'
     Carot = '^'

+    def __str__(self) -> str: return self._value_
+
 @dataclass
 class KeywordToken:
     keyword: Keyword