#include "parser.h"
|
||
#include "ast_printer.h"
|
||
#include <stdlib.h>
|
||
#include <stdio.h>
|
||
#include <string.h>
|
||
|
||
char* token_type_to_string(TokenType type) {
    switch (type) {
        case TOKEN_EOF: return "eof";
        case TOKEN_IDENTIFIER: return "identifier";
        case TOKEN_INT: return "int";
        case TOKEN_FLOAT: return "float";
        case TOKEN_STRING: return "string";
        case TOKEN_KEYWORD: return "keyword";
        case TOKEN_ASSIGN: return "=";
        case TOKEN_PLUS: return "+";
        case TOKEN_MINUS: return "-";
        case TOKEN_MUL: return "*";
        case TOKEN_DIV: return "/";
        case TOKEN_MOD: return "%";
        case TOKEN_PLUS_PLUS: return "++";
        case TOKEN_MINUS_MINUS: return "--";
        case TOKEN_LPAREN: return "(";
        case TOKEN_RPAREN: return ")";
        case TOKEN_LBRACE: return "{";
        case TOKEN_RBRACE: return "}";
        case TOKEN_LBRACKET: return "[";
        case TOKEN_RBRACKET: return "]";
        case TOKEN_COMMA: return ",";
        case TOKEN_SEMICOLON: return ";";
        case TOKEN_COLON: return ":";
        case TOKEN_DOT: return ".";
        case TOKEN_LT: return "<";
        case TOKEN_GT: return ">";
        case TOKEN_LE: return "<=";
        case TOKEN_GE: return ">=";
        case TOKEN_EQ: return "=";
        case TOKEN_EQ_EQ: return "==";
        case TOKEN_NE: return "!=";
        case TOKEN_AND: return "and";
        case TOKEN_OR: return "or";
        case TOKEN_NOT: return "not";
        case TOKEN_BITAND: return "&";
        case TOKEN_BITOR: return "|";
        case TOKEN_BITXOR: return "^";
        case TOKEN_TILDE: return "~";
        case TOKEN_SHL: return "<<";
        case TOKEN_SHR: return ">>";
        case TOKEN_IF: return "if";
        case TOKEN_ELSE: return "else";
        case TOKEN_WHILE: return "while";
        case TOKEN_FOR: return "for";
        case TOKEN_RETURN: return "return";
        case TOKEN_BREAK: return "break";
        case TOKEN_CONTINUE: return "continue";
        case TOKEN_LET: return "let";
        case TOKEN_CONST: return "const";
        case TOKEN_TRUE: return "true";
        case TOKEN_FALSE: return "false";
        case TOKEN_NULL: return "null";
        case TOKEN_IMPORT: return "import";
        case TOKEN_AS: return "as";
        case TOKEN_IN: return "in";
        case TOKEN_NATIVE: return "native";
        case TOKEN_FUNC: return "func";
        case TOKEN_BOOL: return "bool";
        case TOKEN_MAP: return "map";
        case TOKEN_ARRAY: return "array";
        case TOKEN_INT_LITERAL: return "int_literal";
        case TOKEN_FLOAT_LITERAL: return "float_literal";
        case TOKEN_STRING_LITERAL: return "string_literal";
        case TOKEN_BOOL_LITERAL: return "bool_literal";
        case TOKEN_STAR: return "*";
        case TOKEN_SLASH: return "/";
        case TOKEN_BANG: return "!";
        case TOKEN_AT: return "@";
        case TOKEN_THROW: return "throw";
        case TOKEN_ERROR: return "error";
        default: return "unknown";
    }
}

void parser_init(Parser* parser, Lexer* lexer, char* filename) {
    parser->lexer = lexer;
    parser->filename = filename;
    parser->had_error = 0;
    parser->error_count = 0;
    parser->errors = NULL;
    parser->current_token = lexer_next_token(lexer);
}

void parser_free(Parser* parser) {
    free(parser->filename);

    // Free the collected error messages
    if (parser->errors) {
        for (int i = 0; i < parser->error_count; i++) {
            free(parser->errors[i].message);
        }
        free(parser->errors);
    }
}

Token consume_token(Parser* parser) {
    Token previous_token = parser->current_token;
    parser->current_token = lexer_next_token(parser->lexer);
    return previous_token;
}

int match_token(Parser* parser, TokenType type) {
    return parser->current_token.type == type;
}

Token expect_token(Parser* parser, TokenType type) {
    if (!match_token(parser, type)) {
        char message[256];
        snprintf(message, sizeof(message), "Expected '%s', got '%s'",
                 token_type_to_string(type),
                 token_type_to_string(parser->current_token.type));
        parser_error_at_current(parser, message);
    }
    return consume_token(parser);
}

// Parse a function declaration
/*
 * func func_name(p: int): int {}
 */
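// Illustrative declarations this routine accepts (a sketch based on the code
// below; the identifiers and types are made-up examples, not a fixed API):
//
//   func add(a: int, b: int): int { return a + b; }
//   func head(xs: int[]): int { return xs[0]; }
//   native func print(msg: string);       // 'native': no body, ends with ';'
//   func zeros(n: int): int[10] {...}     // array return type with a size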
ASTNode* parse_function_declaration(Parser* parser) {
    ASTNode* node = create_node(NODE_FUNC_DECL);
    set_node_position(node, parser->lexer->line, parser->lexer->column);

    // Handle the optional 'native' modifier
    if (match_token(parser, TOKEN_NATIVE)) {
        consume_token(parser);
        node->value = strdup("native");
    } else {
        node->value = NULL;
    }

    // Consume the 'func' keyword
    expect_token(parser, TOKEN_FUNC);

    // Parse the function name
    Token identifier = consume_token(parser);
    ASTNode* id_node = create_node(NODE_IDENTIFIER);
    set_node_position(id_node, identifier.line, identifier.column);
    id_node->value = strdup(identifier.value);
    add_child(node, id_node);

    // Parse the parameter list
    expect_token(parser, TOKEN_LPAREN);
    ASTNode* params_node = create_node(NODE_PARAM_LIST);
    set_node_position(params_node, parser->lexer->line, parser->lexer->column);
    while (!match_token(parser, TOKEN_RPAREN)) {
        // Create a node for a single parameter
        ASTNode* param_node = create_node(NODE_PARAM);
        set_node_position(param_node, parser->lexer->line, parser->lexer->column);

        // Parse the parameter name
        Token param = consume_token(parser);
        ASTNode* id_node = create_node(NODE_IDENTIFIER);
        set_node_position(id_node, param.line, param.column);
        id_node->value = strdup(param.value);
        add_child(param_node, id_node);

        // Handle the parameter type annotation
        if (match_token(parser, TOKEN_COLON)) {
            consume_token(parser);
            Token type = consume_token(parser);
            ASTNode* type_node = create_node(NODE_TYPE_IDENTIFIER);
            type_node->value = strdup(type.value);
            set_node_position(type_node, type.line, type.column);

            // Handle array types
            while (match_token(parser, TOKEN_LBRACKET)) {
                consume_token(parser); // consume '['
                type_node->type = NODE_ARRAY_TYPE;

                // Parse the array size, if present
                if (!match_token(parser, TOKEN_RBRACKET)) {
                    Token size_token = consume_token(parser);
                    ASTNode* size_node = create_node(NODE_INT_LITERAL);
                    size_node->value = strdup(size_token.value);
                    set_node_position(size_node, size_token.line, size_token.column);
                    add_child(type_node, size_node);
                }

                expect_token(parser, TOKEN_RBRACKET);
            }

            add_child(param_node, type_node);
        }

        // Append the parameter node to the parameter list
        add_child(params_node, param_node);

        // Handle the comma between parameters
        if (match_token(parser, TOKEN_COMMA)) {
            consume_token(parser);
        } else {
            break;
        }
    }
    expect_token(parser, TOKEN_RPAREN);
    add_child(node, params_node);

    // Handle the return type
    if (match_token(parser, TOKEN_COLON)) {
        consume_token(parser);
        Token return_type = consume_token(parser);
        ASTNode* return_type_node = create_node(NODE_TYPE_IDENTIFIER);
        return_type_node->value = strdup(return_type.value);
        set_node_position(return_type_node, return_type.line, return_type.column);

        // Handle array return types
        while (match_token(parser, TOKEN_LBRACKET)) {
            consume_token(parser); // consume '['
            return_type_node->type = NODE_ARRAY_TYPE; // mark as an array type

            // Parse the array size, if present
            if (!match_token(parser, TOKEN_RBRACKET)) {
                Token size_token = consume_token(parser);
                ASTNode* size_node = create_node(NODE_INT_LITERAL);
                size_node->value = strdup(size_token.value);
                set_node_position(size_node, size_token.line, size_token.column);
                add_child(return_type_node, size_node);
            }

            expect_token(parser, TOKEN_RBRACKET);
        }

        add_child(node, return_type_node);
    }

    // Handle a semicolon (declaration only) or a function body
    if (match_token(parser, TOKEN_SEMICOLON)) {
        consume_token(parser);
    } else {
        // Parse the function body
        ASTNode* body = parse_block(parser);
        if (body != NULL) {
            add_child(node, body);
        }
    }

    return node;
}

// Parse a variable declaration
// let/const name[: type | none] = expr
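// Illustrative forms (a sketch drawn from the code below; names are examples):
//
//   let x: int = 10;            // explicit type annotation
//   const pi = 3.14;            // no annotation: type is inferred (f64 here)
//   let buf: int[16];           // array type with a size
//   let m: map<string, int>;    // generic type parameters after the base type
//   let a = 1, b = 2;           // comma-separated list -> NODE_VAR_DECL_LIST
//
// Inference defaults (see the switch below): int literal -> "i32",
// float -> "f64", string -> "string", bool -> "bool", [..] -> "array",
// {..} -> "map", anything else -> "any".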
ASTNode* parse_var_declaration(Parser* parser) {
    // Record the declaration kind (let/const)
    char* decl_type;
    if (match_token(parser, TOKEN_LET)) {
        consume_token(parser);
        decl_type = strdup("let");
    } else if (match_token(parser, TOKEN_CONST)) {
        consume_token(parser);
        decl_type = strdup("const");
    } else {
        parser_error_at_current(parser, "Expected let or const");
        return NULL;
    }

    // Create the first variable declaration node
    ASTNode* first_var = create_node(NODE_VAR_DECL);
    set_node_position(first_var, parser->lexer->line, parser->lexer->column);
    first_var->value = decl_type;

    // Parse the identifier
    Token identifier = consume_token(parser);
    ASTNode* id_node = create_node(NODE_IDENTIFIER);
    set_node_position(id_node, identifier.line, identifier.column);
    id_node->value = strdup(identifier.value);
    add_child(first_var, id_node);

    // Handle the type annotation
    ASTNode* type_node = NULL;
    if (match_token(parser, TOKEN_COLON)) {
        consume_token(parser); // consume ':'
        // Parse the type identifier
        Token type = consume_token(parser);
        type_node = create_node(NODE_TYPE_IDENTIFIER);
        type_node->value = strdup(type.value);
        set_node_position(type_node, type.line, type.column);

        // Handle array types
        while (match_token(parser, TOKEN_LBRACKET)) {
            consume_token(parser); // consume '['
            type_node->type = NODE_ARRAY_TYPE; // mark as an array type

            // Parse the array size, if present
            if (!match_token(parser, TOKEN_RBRACKET)) {
                Token size_token = consume_token(parser);
                ASTNode* size_node = create_node(NODE_INT_LITERAL);
                size_node->value = strdup(size_token.value);
                set_node_position(size_node, size_token.line, size_token.column);
                add_child(type_node, size_node);
            }

            expect_token(parser, TOKEN_RBRACKET);
        }

        // Handle generic type parameters
        if (match_token(parser, TOKEN_LT)) {
            consume_token(parser); // consume '<'
            ASTNode* generic_params = create_node(NODE_GENERIC_PARAMS);
            set_node_position(generic_params, parser->lexer->line, parser->lexer->column);

            do {
                Token param = consume_token(parser);
                ASTNode* param_node = create_node(NODE_TYPE_IDENTIFIER);
                param_node->value = strdup(param.value);
                set_node_position(param_node, param.line, param.column);
                add_child(generic_params, param_node);

                if (match_token(parser, TOKEN_COMMA)) {
                    consume_token(parser);
                }
            } while (!match_token(parser, TOKEN_GT));

            expect_token(parser, TOKEN_GT);
            add_child(type_node, generic_params);
        }

        add_child(first_var, type_node);
    }

    // Handle the initializer
    if (match_token(parser, TOKEN_EQ)) {
        consume_token(parser); // consume '='

        // Parse the initializer expression
        ASTNode* init_expr = parse_expression(parser);
        if (init_expr != NULL) {
            add_child(first_var, init_expr);

            // Without an explicit type annotation, infer the type from the initializer
            if (type_node == NULL) {
                type_node = create_node(NODE_TYPE_IDENTIFIER);
                set_node_position(type_node, init_expr->line, init_expr->column);

                // Infer the variable type from the expression kind
                switch (init_expr->type) {
                    case NODE_INT_LITERAL:
                        type_node->value = strdup("i32"); // default integer type is i32
                        break;
                    case NODE_FLOAT_LITERAL:
                        type_node->value = strdup("f64"); // default float type is f64
                        break;
                    case NODE_STRING_LITERAL:
                        type_node->value = strdup("string");
                        break;
                    case NODE_BOOL_LITERAL:
                        type_node->value = strdup("bool");
                        break;
                    case NODE_ARRAY_LITERAL:
                        type_node->value = strdup("array");
                        break;
                    case NODE_MAP_LITERAL:
                        type_node->value = strdup("map");
                        break;
                    default:
                        type_node->value = strdup("any");
                }
                add_child(first_var, type_node);
            }
        }
    }

    // Check whether more variables follow
    if (!match_token(parser, TOKEN_COMMA)) {
        // Single-variable declaration: return it directly
        if (match_token(parser, TOKEN_SEMICOLON)) {
            consume_token(parser);
        }
        return first_var;
    }

    // Multi-variable declaration: create a list node
    ASTNode* list_node = create_node(NODE_VAR_DECL_LIST);
    set_node_position(list_node, parser->lexer->line, parser->lexer->column);
    list_node->value = strdup(decl_type); // copy: first_var already owns decl_type
    add_child(list_node, first_var);

    // Parse the remaining variables
    while (match_token(parser, TOKEN_COMMA)) {
        consume_token(parser); // consume ','

        ASTNode* var_node = create_node(NODE_VAR_DECL);
        set_node_position(var_node, parser->lexer->line, parser->lexer->column);
        var_node->value = strdup(decl_type);

        // Parse the identifier
        identifier = consume_token(parser);
        id_node = create_node(NODE_IDENTIFIER);
        set_node_position(id_node, identifier.line, identifier.column);
        id_node->value = strdup(identifier.value);
        add_child(var_node, id_node);

        // Handle the type annotation
        if (match_token(parser, TOKEN_COLON)) {
            consume_token(parser); // consume ':'
            Token type = consume_token(parser);
            ASTNode* type_node = create_node(NODE_TYPE_IDENTIFIER);
            type_node->value = strdup(type.value);
            set_node_position(type_node, type.line, type.column);

            // Handle an array dimension
            if (match_token(parser, TOKEN_LBRACKET)) {
                consume_token(parser); // consume '['

                // Parse the array size
                if (!match_token(parser, TOKEN_RBRACKET)) {
                    Token size_token = consume_token(parser);
                    ASTNode* size_node = create_node(NODE_INT_LITERAL);
                    size_node->value = strdup(size_token.value);
                    set_node_position(size_node, size_token.line, size_token.column);
                    add_child(type_node, size_node);
                }

                expect_token(parser, TOKEN_RBRACKET);
            }

            add_child(var_node, type_node);
        }

        // Handle the initializer
        if (match_token(parser, TOKEN_EQ)) {
            consume_token(parser); // consume '='
            ASTNode* init_expr = parse_expression(parser);
            if (init_expr != NULL) {
                add_child(var_node, init_expr);
            }
        }

        add_child(list_node, var_node);
    }

    // Optional semicolon
    if (match_token(parser, TOKEN_SEMICOLON)) {
        consume_token(parser);
    }
    return list_node;
}

// Parse a key-value (map) literal
/*
 * {a:6}
 */
ASTNode* parse_key_value_literal(Parser* parser) {
    ASTNode* node = create_node(NODE_MAP_LITERAL);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    // Consume the opening brace
    expect_token(parser, TOKEN_LBRACE);
    // Parse the key-value entries
    do {
        // Create an entry node
        ASTNode* entry_node = create_node(NODE_MAP_ENTRY);
        set_node_position(entry_node, parser->lexer->line, parser->lexer->column);

        // Parse the key
        Token key = consume_token(parser);
        ASTNode* key_node = create_node(NODE_IDENTIFIER);
        set_node_position(key_node, key.line, key.column);
        key_node->value = strdup(key.value);
        add_child(entry_node, key_node);

        // Consume the colon
        expect_token(parser, TOKEN_COLON);

        // Parse the value
        ASTNode* value_node = parse_expression(parser);
        add_child(entry_node, value_node);

        // Append the entry to the map literal
        add_child(node, entry_node);

        // Handle the comma between entries
        if (match_token(parser, TOKEN_COMMA)) {
            consume_token(parser); // consume ','
        }
    } while (!match_token(parser, TOKEN_RBRACE));
    expect_token(parser, TOKEN_RBRACE);
    return node;
}

// Parse an array literal
/*
 * [1, 2, 3]
 */
ASTNode* parse_array_literal(Parser* parser) {
    ASTNode* node = create_node(NODE_ARRAY_LITERAL);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    // Consume the opening bracket
    expect_token(parser, TOKEN_LBRACKET);

    // Parse the elements
    do {
        // Previously each element was wrapped in its own node:
        //ASTNode* item_node = create_node(NODE_ARRAY_ITEM);
        //set_node_position(item_node, parser->lexer->line, parser->lexer->column);

        // Parse the element value
        ASTNode* value_node = parse_expression(parser);
        add_child(node, value_node);

        // (previously: append the element node to the array)
        //add_child(node, item_node);

        // Handle the comma between elements
        if (match_token(parser, TOKEN_COMMA)) {
            consume_token(parser); // consume ','
        }
    } while (!match_token(parser, TOKEN_RBRACKET));
    expect_token(parser, TOKEN_RBRACKET);
    return node;
}

// Parse a try-catch statement
/*
 * try {
 *     // code
 * } catch (e) {
 *     // handler
 * }
 */
ASTNode* parse_try_catch_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_TRY_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);

    // Parse the try block
    expect_token(parser, TOKEN_TRY);
    ASTNode* try_block = parse_block(parser);
    add_child(node, try_block);

    // Parse the catch block
    expect_token(parser, TOKEN_CATCH);
    expect_token(parser, TOKEN_LPAREN);

    // Parse the exception variable
    Token exception_var = expect_token(parser, TOKEN_IDENTIFIER);
    ASTNode* var_node = create_node(NODE_IDENTIFIER);
    set_node_position(var_node, exception_var.line, exception_var.column);
    var_node->value = strdup(exception_var.value);

    // Create the catch-block node
    ASTNode* catch_node = create_node(NODE_CATCH_BLOCK);
    set_node_position(catch_node, parser->lexer->line, parser->lexer->column);
    add_child(catch_node, var_node);

    expect_token(parser, TOKEN_RPAREN);
    ASTNode* catch_block = parse_block(parser);
    add_child(catch_node, catch_block);
    add_child(node, catch_node);
    return node;
}

ASTNode* parse_block(Parser* parser) {
    ASTNode* node = create_node(NODE_BLOCK);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    expect_token(parser, TOKEN_LBRACE);
    while (!match_token(parser, TOKEN_RBRACE) && !match_token(parser, TOKEN_EOF)) {
        ASTNode* stmt = parse_statement(parser);
        if (stmt != NULL) {
            add_child(node, stmt);
        }
    }
    expect_token(parser, TOKEN_RBRACE);
    return node;
}

// Parse an if statement
/*
 * if (a >= 6 or b >= 7) {...} else if (a > 7) {...} else {...}
 * is equivalent to
 * if (a >= 6 or b >= 7) {...} else {
 *     if (a > 7) {...} else {...}
 * }
 */
ASTNode* parse_if_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_IF_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    expect_token(parser, TOKEN_IF);
    expect_token(parser, TOKEN_LPAREN);

    // Parse the condition expression
    ASTNode* cond_expr = parse_expression(parser);
    add_child(node, cond_expr);
    expect_token(parser, TOKEN_RPAREN);

    // Parse the if body
    if (match_token(parser, TOKEN_LBRACE)) {
        ASTNode* if_body = parse_block(parser);
        add_child(node, if_body);
    }

    // Handle else-if and else branches
    while (match_token(parser, TOKEN_ELSE)) {
        consume_token(parser); // consume 'else'

        if (match_token(parser, TOKEN_IF)) {
            // Handle an else-if branch
            consume_token(parser); // consume 'if'
            ASTNode* else_if_node = create_node(NODE_ELSE_IF);
            set_node_position(else_if_node, parser->lexer->line, parser->lexer->column);

            expect_token(parser, TOKEN_LPAREN);
            ASTNode* else_if_cond = parse_expression(parser);
            add_child(else_if_node, else_if_cond);
            expect_token(parser, TOKEN_RPAREN);

            if (match_token(parser, TOKEN_LBRACE)) {
                ASTNode* else_if_body = parse_block(parser);
                add_child(else_if_node, else_if_body);
            }

            add_child(node, else_if_node);
        } else if (match_token(parser, TOKEN_LBRACE)) {
            // Handle the else branch
            ASTNode* else_node = create_node(NODE_ELSE_BLOCK);
            set_node_position(else_node, parser->lexer->line, parser->lexer->column);
            ASTNode* else_body = parse_block(parser);
            add_child(else_node, else_body);
            add_child(node, else_node);
            break;
        }
    }
    return node;
}

// Parse a for statement
/*
 * for (let i:int = 0, j:int = 0; i < 10; i++) {...}
 */
ASTNode* parse_for_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_FOR_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);

    expect_token(parser, TOKEN_FOR);
    expect_token(parser, TOKEN_LPAREN);

    // Parse the initializer (parse_statement consumes its trailing ';')
    ASTNode* init_list = create_node(NODE_FOR_INIT);
    set_node_position(init_list, parser->lexer->line, parser->lexer->column);

    ASTNode* init_stmt = parse_statement(parser);
    add_child(init_list, init_stmt);
    add_child(node, init_list);

    // Parse the condition expression
    ASTNode* cond_node = create_node(NODE_FOR_CONDITION);
    set_node_position(cond_node, parser->lexer->line, parser->lexer->column);
    if (!match_token(parser, TOKEN_SEMICOLON)) {
        ASTNode* cond_expr = parse_expression(parser);
        add_child(cond_node, cond_expr);
    } else {
        ASTNode* empty_expr = create_node(NODE_EXPR_STMT);
        set_node_position(empty_expr, parser->lexer->line, parser->lexer->column);
        add_child(cond_node, empty_expr);
        consume_token(parser); // consume the ';' of an empty condition
    }
    add_child(node, cond_node);

    // Parse the update expression
    ASTNode* update_node = create_node(NODE_FOR_UPDATE);
    set_node_position(update_node, parser->lexer->line, parser->lexer->column);
    if (!match_token(parser, TOKEN_RPAREN)) {
        ASTNode* update_stmt = parse_expression(parser);
        add_child(update_node, update_stmt);
    } else {
        ASTNode* empty_stmt = create_node(NODE_EXPR_STMT);
        set_node_position(empty_stmt, parser->lexer->line, parser->lexer->column);
        add_child(update_node, empty_stmt);
    }
    add_child(node, update_node);

    expect_token(parser, TOKEN_RPAREN);

    // Parse the loop body
    ASTNode* body = parse_block(parser);
    add_child(node, body);
    return node;
}

// Parse a while statement
ASTNode* parse_while_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_WHILE_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);

    expect_token(parser, TOKEN_WHILE);
    expect_token(parser, TOKEN_LPAREN);

    // Parse the condition expression
    ASTNode* cond_expr = parse_expression(parser);
    add_child(node, cond_expr);
    expect_token(parser, TOKEN_RPAREN);

    // Parse the loop body
    ASTNode* body = parse_block(parser);
    add_child(node, body);
    return node;
}

ASTNode* parse_expression_statement(Parser* parser) {
    //ASTNode* node = create_node(NODE_EXPR_STMT);
    //set_node_position(node, parser->lexer->line, parser->lexer->column);

    ASTNode* expr = parse_expression(parser);
    set_node_position(expr, parser->lexer->line, parser->lexer->column);
    //add_child(node, expr);

    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);
    return expr;
}

// Parse a return statement
ASTNode* parse_return_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_RETURN_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    expect_token(parser, TOKEN_RETURN);
    // If there is a return value
    if (!match_token(parser, TOKEN_SEMICOLON)) add_child(node, parse_expression(parser));
    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);
    return node;
}

ASTNode* parse_break_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_BREAK_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    expect_token(parser, TOKEN_BREAK);
    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);
    return node;
}

ASTNode* parse_continue_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_CONTINUE_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    expect_token(parser, TOKEN_CONTINUE);
    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);
    return node;
}

ASTNode* parse_import_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_IMPORT_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    expect_token(parser, TOKEN_IMPORT);

    // Parse the import path
    ASTNode* path_node = create_node(NODE_IMPORT_PATH);
    set_node_position(path_node, parser->lexer->line, parser->lexer->column);

    // Parse the first path segment
    Token identifier = consume_token(parser);
    ASTNode* id_node = create_node(NODE_IDENTIFIER);
    set_node_position(id_node, identifier.line, identifier.column);
    id_node->value = strdup(identifier.value);
    add_child(path_node, id_node);

    // Parse the remaining path segments
    while (match_token(parser, TOKEN_DOT)) {
        consume_token(parser); // consume '.'

        // Handle consecutive dots ('..', parent directory)
        if (match_token(parser, TOKEN_DOT)) {
            consume_token(parser); // consume the second '.'
            ASTNode* parent_dir_node = create_node(NODE_IMPORT_PATH);
            set_node_position(parent_dir_node, parser->lexer->line, parser->lexer->column);
            add_child(path_node, parent_dir_node);
        } else {
            // Parse a regular path segment
            identifier = consume_token(parser);
            id_node = create_node(NODE_IDENTIFIER);
            set_node_position(id_node, identifier.line, identifier.column);
            id_node->value = strdup(identifier.value);
            add_child(path_node, id_node);
        }
    }

    add_child(node, path_node);

    // Handle an alias ('as')
    if (match_token(parser, TOKEN_AS)) {
        consume_token(parser); // consume 'as'
        Token alias = consume_token(parser);
        ASTNode* alias_node = create_node(NODE_IMPORT_ALIAS);
        set_node_position(alias_node, alias.line, alias.column);
        alias_node->value = strdup(alias.value);
        add_child(node, alias_node);
    }

    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);
    return node;
}

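// Illustrative import forms handled above (a sketch; the module names are made up):
//
//   import std.io;             // dotted path
//   import std.io as io2;      // with an 'as' alias
//   import pkg..sub;           // consecutive dots add a parent-directory segment
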
// Parse an annotation
/*
 * @name or @name(param1, param2, ...) or @name(key1=value1, key2=value2, ...)
 */
ASTNode* parse_annotation(Parser* parser) {
    ASTNode* node = create_node(NODE_ANNOTATION);
    set_node_position(node, parser->lexer->line, parser->lexer->column);

    // Consume the '@' symbol
    expect_token(parser, TOKEN_AT);

    // Parse the annotation name
    Token name = expect_token(parser, TOKEN_IDENTIFIER);
    node->value = strdup(name.value);

    // Parse the optional argument list
    if (match_token(parser, TOKEN_LPAREN)) {
        consume_token(parser); // consume '('

        // Parse the arguments
        while (!match_token(parser, TOKEN_RPAREN)) {
            // Check for a named argument (name=value form)
            if (match_token(parser, TOKEN_IDENTIFIER)) {
                Token identifier = parser->current_token;
                consume_token(parser);

                // Check for an equals sign
                if (match_token(parser, TOKEN_EQ)) {
                    consume_token(parser); // consume '='

                    // Create a named-argument node
                    ASTNode* named_arg = create_node(NODE_NAMED_ARG);
                    set_node_position(named_arg, identifier.line, identifier.column);

                    // Add the argument name
                    ASTNode* name_node = create_node(NODE_IDENTIFIER);
                    set_node_position(name_node, identifier.line, identifier.column);
                    name_node->value = strdup(identifier.value);
                    add_child(named_arg, name_node);

                    // Parse the argument value
                    ASTNode* value = parse_expression_with_precedence(parser, PREC_NONE);
                    add_child(named_arg, value);

                    // Attach the named argument to the annotation
                    add_child(node, named_arg);
                } else {
                    // Not a named argument: back up and treat it as a positional argument
                    parser->lexer->current_pos--; // rewind the lexer position
                    parser->lexer->column = parser->lexer->column - strlen(identifier.value);
                    parser->current_token = identifier; // restore the token
                    ASTNode* param = parse_expression(parser);
                    add_child(node, param);
                }
            } else {
                // Positional argument
                ASTNode* pos_arg = create_node(NODE_POS_ARG);
                set_node_position(pos_arg, parser->lexer->line, parser->lexer->column);

                // Parse the argument value
                ASTNode* value = parse_expression(parser);
                add_child(pos_arg, value);

                // Attach the positional argument to the annotation node
                add_child(node, pos_arg);
            }

            // Handle the comma between arguments
            if (match_token(parser, TOKEN_COMMA)) {
                consume_token(parser); // consume ','
            } else {
                break;
            }
        }

        expect_token(parser, TOKEN_RPAREN); // consume ')'
    }

    return node;
}

// Parse a statement
ASTNode* parse_statement(Parser* parser) {
    // Handle annotations (multiple consecutive annotations are supported)
    ASTNode* annotations = create_node(NODE_ANNOTATIONS);
    set_node_position(annotations, parser->lexer->line, parser->lexer->column);

    // Collect all consecutive annotations
    while (match_token(parser, TOKEN_AT)) {
        ASTNode* current_annotation = parse_annotation(parser);
        add_child(annotations, current_annotation);
    }

    // Parse the actual statement
    ASTNode* stmt = NULL;

    if (match_token(parser, TOKEN_LET) || match_token(parser, TOKEN_CONST)) {
        stmt = parse_var_declaration(parser);
    } else if (match_token(parser, TOKEN_FUNC) || match_token(parser, TOKEN_NATIVE)) {
        stmt = parse_function_declaration(parser);
    } else if (match_token(parser, TOKEN_IF)) {
        stmt = parse_if_statement(parser);
    } else if (match_token(parser, TOKEN_FOR)) {
        stmt = parse_for_statement(parser);
    } else if (match_token(parser, TOKEN_WHILE)) {
        stmt = parse_while_statement(parser);
    } else if (match_token(parser, TOKEN_RETURN)) {
        stmt = parse_return_statement(parser);
    } else if (match_token(parser, TOKEN_BREAK)) {
        stmt = parse_break_statement(parser);
    } else if (match_token(parser, TOKEN_CONTINUE)) {
        stmt = parse_continue_statement(parser);
    } else if (match_token(parser, TOKEN_IMPORT)) {
        stmt = parse_import_statement(parser);
    } else if (match_token(parser, TOKEN_TRY)) {
        stmt = parse_try_catch_statement(parser);
    } else if (match_token(parser, TOKEN_THROW)) {
        stmt = parse_throw_statement(parser);
    } else {
        // Expression statement
        stmt = parse_expression_statement(parser);
        if (match_token(parser, TOKEN_SEMICOLON)) {
            consume_token(parser); // consume ';'
        }
    }

    // If there are annotations, attach them to the statement
    if (annotations->children_count > 0 && stmt != NULL) {
        // Make the annotations node the statement's first child:
        // save the existing children first
        int original_count = stmt->children_count;
        ASTNode** original_children = malloc(sizeof(ASTNode*) * original_count);
        for (int i = 0; i < original_count; i++) {
            original_children[i] = stmt->children[i];
        }

        // Reset the child array
        stmt->children_count = 0;

        // Add the annotations as the first child
        add_child(stmt, annotations);

        // Re-add the original children
        for (int i = 0; i < original_count; i++) {
            add_child(stmt, original_children[i]);
        }

        // Free the temporary array
        free(original_children);
    } else {
        // No annotations: free the annotations node
        free(annotations);
    }

    return stmt;
}

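// Resulting shape when annotations are present (a sketch; names such as
// 'inline' and 'f' are only examples). For
//
//   @inline
//   func f(): int {...}
//
// parse_statement() re-parents the children so the NODE_ANNOTATIONS node
// becomes child 0 of the statement:
//
//   NODE_FUNC_DECL
//   +-- NODE_ANNOTATIONS (one NODE_ANNOTATION per '@...')
//   +-- NODE_IDENTIFIER "f"
//   +-- NODE_PARAM_LIST
//   +-- ...
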
ASTNode* parse_throw_statement(Parser* parser) {
    ASTNode* node = create_node(NODE_THROW_STMT);
    set_node_position(node, parser->lexer->line, parser->lexer->column);
    consume_token(parser); // consume 'throw'
    // Parse the expression after 'throw'
    ASTNode* expr = parse_expression(parser);
    add_child(node, expr);

    // Optional semicolon
    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);

    return node;
}

// Expression parsing (handles logical and other operators by precedence)
// Get the precedence of an operator token
int get_precedence(TokenType type) {
    switch (type) {
        case TOKEN_EQ: return PREC_ASSIGNMENT;
        case TOKEN_PLUS_ASSIGN:
        case TOKEN_MINUS_ASSIGN:
        case TOKEN_MUL_ASSIGN:
        case TOKEN_DIV_ASSIGN:
        case TOKEN_MOD_ASSIGN:
        case TOKEN_BITAND_ASSIGN:
        case TOKEN_BITOR_ASSIGN:
        case TOKEN_BITXOR_ASSIGN:
        case TOKEN_SHL_ASSIGN:
        case TOKEN_SHR_ASSIGN: return PREC_ASSIGNMENT;
        case TOKEN_QUESTION: return PREC_TERNARY;
        case TOKEN_OR: return PREC_OR;
        case TOKEN_AND: return PREC_AND;
        case TOKEN_EQ_EQ:
        case TOKEN_NE: return PREC_EQUALITY;
        case TOKEN_LT:
        case TOKEN_GT:
        case TOKEN_LE:
        case TOKEN_GE: return PREC_COMPARISON;
        case TOKEN_BITAND:
        case TOKEN_BITOR:
        case TOKEN_BITXOR: return PREC_BITWISE;
        case TOKEN_SHL:
        case TOKEN_SHR: return PREC_SHIFT;
        case TOKEN_PLUS:
        case TOKEN_MINUS: return PREC_TERM;
        case TOKEN_STAR:
        case TOKEN_SLASH:
        case TOKEN_MOD: return PREC_FACTOR;
        case TOKEN_LPAREN:
        case TOKEN_DOT:
        case TOKEN_LBRACKET: return PREC_CALL; // call, member access, indexing
        case TOKEN_AS: return PREC_CAST; // type cast
        default: return PREC_NONE;
    }
}

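// How these levels are used (a sketch): parse_expression_with_precedence()
// keeps absorbing operators while `precedence < get_precedence(current_token)`,
// so a token mapped to a higher PREC_* level binds more tightly, assuming the
// PREC_* enum in parser.h is declared in increasing order from PREC_NONE up to
// PREC_CALL/PREC_CAST. For example, in `a * b + c` the `*` (PREC_FACTOR) is
// absorbed first, and the `+` (PREC_TERM) then takes the finished `a * b` node
// as its left operand, giving (a * b) + c.
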
// Parse a unary expression
ASTNode* parse_unary(Parser* parser) {
    // Handle prefix (unary) operators
    if (match_token(parser, TOKEN_MINUS) || match_token(parser, TOKEN_NOT) ||
        match_token(parser, TOKEN_BANG) || match_token(parser, TOKEN_PLUS_PLUS) ||
        match_token(parser, TOKEN_MINUS_MINUS) || match_token(parser, TOKEN_TILDE)) {
        Token op = consume_token(parser);
        ASTNode* operand = parse_unary(parser); // recurse to handle nested unary expressions

        ASTNode* unary = create_node(NODE_UNARY_EXPR);
        set_node_position(unary, op.line, op.column);
        unary->value = strdup(op.value);
        add_child(unary, operand);

        return unary;
    }

    return parse_primary(parser);
}

// Parse an expression
ASTNode* parse_expression(Parser* parser) {
    return parse_expression_with_precedence(parser, PREC_NONE);
}

// Parse an expression, absorbing operators above the given precedence
ASTNode* parse_expression_with_precedence(Parser* parser, int precedence) {
    ASTNode* left = parse_unary(parser);

    // Handle postfix expressions (increment, decrement)
    while (match_token(parser, TOKEN_PLUS_PLUS) || match_token(parser, TOKEN_MINUS_MINUS)) {
        Token op = consume_token(parser);
        ASTNode* postfix = create_node(NODE_POSTFIX_EXPR);
        set_node_position(postfix, op.line, op.column);
        postfix->value = strdup(op.value);
        add_child(postfix, left);
        left = postfix;
    }

    while (precedence < get_precedence(parser->current_token.type)) {
        TokenType op_type = parser->current_token.type;
        Token op = consume_token(parser);

        // Handle type casts
        if (op_type == TOKEN_AS) {
            ASTNode* cast = create_node(NODE_CAST_EXPR);
            set_node_position(cast, op.line, op.column);
            add_child(cast, left);

            // Parse the target type
            Token type_token = consume_token(parser);
            ASTNode* type_node = create_node(NODE_TYPE_IDENTIFIER);
            set_node_position(type_node, type_token.line, type_token.column);
            type_node->value = strdup(type_token.value);
            add_child(cast, type_node);

            left = cast;
            continue;
        }

        // Handle the ternary operator
        if (op_type == TOKEN_QUESTION) {
            // Create a ternary expression node
            ASTNode* ternary = create_node(NODE_TERNARY_EXPR);
            set_node_position(ternary, op.line, op.column);
            ternary->op_type = OP_TERNARY;

            // Add the condition expression
            add_child(ternary, left);

            // Parse the 'true' branch
            ASTNode* true_expr = parse_expression_with_precedence(parser, PREC_NONE);
            add_child(ternary, true_expr);

            // Consume the colon
            expect_token(parser, TOKEN_COLON);

            // Parse the 'false' branch
            ASTNode* false_expr = parse_expression_with_precedence(parser, PREC_TERNARY);
            add_child(ternary, false_expr);

            left = ternary;
            continue;
        }

        // Handle calls, member access, and index access (left-to-right associative)
        if (op_type == TOKEN_LPAREN || op_type == TOKEN_DOT || op_type == TOKEN_LBRACKET) {
            if (op_type == TOKEN_LPAREN) {
                ASTNode* call = create_node(NODE_CALL_EXPR);
                set_node_position(call, op.line, op.column);
                add_child(call, left);

                if (!match_token(parser, TOKEN_RPAREN)) {
                    // Create an argument list node
                    ASTNode* params_node = create_node(NODE_PARAM_LIST);
                    set_node_position(params_node, parser->lexer->line, parser->lexer->column);
                    add_child(call, params_node);

                    while (!match_token(parser, TOKEN_RPAREN)) {
                        if (match_token(parser, TOKEN_IDENTIFIER)) {
                            Token identifier = parser->current_token;
                            consume_token(parser);

                            // Check for an equals sign (named argument)
                            if (match_token(parser, TOKEN_EQ)) {
                                consume_token(parser);

                                ASTNode* named_arg = create_node(NODE_NAMED_ARG);
                                set_node_position(named_arg, identifier.line, identifier.column);

                                // Add the argument-name node
                                ASTNode* name_node = create_node(NODE_IDENTIFIER);
                                name_node->value = strdup(identifier.value);
                                set_node_position(name_node, identifier.line, identifier.column);
                                add_child(named_arg, name_node);

                                // Parse and add the argument value
                                ASTNode* value = parse_expression_with_precedence(parser, PREC_NONE);
                                add_child(named_arg, value);

                                add_child(params_node, named_arg);
                            } else {
                                // Not a named argument: back up and treat it as positional
                                parser->lexer->current_pos--;
                                parser->lexer->column = parser->lexer->column - strlen(identifier.value);
                                parser->current_token = identifier;

                                ASTNode* arg = parse_expression_with_precedence(parser, PREC_NONE);
                                add_child(params_node, arg);
                            }
                        } else {
                            // Positional argument
                            ASTNode* arg = parse_expression_with_precedence(parser, PREC_NONE);
                            add_child(params_node, arg);
                        }

                        if (match_token(parser, TOKEN_COMMA)) {
                            consume_token(parser);
                        } else {
                            break;
                        }
                    }
                }
                expect_token(parser, TOKEN_RPAREN);
                left = call;
            }
            else if (op_type == TOKEN_DOT) {
                ASTNode* member = create_node(NODE_MEMBER_EXPR);
                set_node_position(member, op.line, op.column);
                add_child(member, left);

                Token identifier = expect_token(parser, TOKEN_IDENTIFIER);
                ASTNode* id_node = create_node(NODE_IDENTIFIER);
                set_node_position(id_node, identifier.line, identifier.column);
                id_node->value = strdup(identifier.value);
                add_child(member, id_node);

                left = member;
            }
            else { // TOKEN_LBRACKET
                ASTNode* index = create_node(NODE_INDEX_EXPR);
                set_node_position(index, op.line, op.column);
                add_child(index, left);

                ASTNode* expr = parse_expression_with_precedence(parser, PREC_NONE);
                add_child(index, expr);

                expect_token(parser, TOKEN_RBRACKET);
                left = index;
            }
        }
        // Handle binary operators (left-to-right associative)
        else {
            int new_precedence = get_precedence(op_type);
            // Parse the right operand one level higher so that operators of the
            // same precedence associate to the left
            ASTNode* right = parse_expression_with_precedence(parser, new_precedence + 1);

            // Create a binary expression node
            ASTNode* binary = create_node(NODE_BINARY_EXPR);
            set_node_position(binary, op.line, op.column);

            // Set the operator type
            switch (op_type) {
                // Arithmetic operators
                case TOKEN_PLUS:
                    binary->op_type = OP_ADD;
                    break;
                case TOKEN_MINUS:
                    binary->op_type = OP_SUB;
                    break;
                case TOKEN_STAR:
                    binary->op_type = OP_MUL;
                    break;
                case TOKEN_SLASH:
                    binary->op_type = OP_DIV;
                    break;
                case TOKEN_MOD:
                    binary->op_type = OP_MOD;
                    break;

                // Logical operators
                case TOKEN_AND:
                    binary->op_type = OP_AND;
                    break;
                case TOKEN_OR:
                    binary->op_type = OP_OR;
                    break;

                // Comparison operators
                case TOKEN_EQ_EQ:
                    binary->op_type = OP_EQ;
                    break;
                case TOKEN_NE:
                    binary->op_type = OP_NE;
                    break;
                case TOKEN_LT:
                    binary->op_type = OP_LT;
                    break;
                case TOKEN_GT:
                    binary->op_type = OP_GT;
                    break;
                case TOKEN_LE:
                    binary->op_type = OP_LE;
                    break;
                case TOKEN_GE:
                    binary->op_type = OP_GE;
                    break;

                // Bitwise operators
                case TOKEN_BITAND:
                    binary->op_type = OP_BITAND;
                    break;
                case TOKEN_BITOR:
                    binary->op_type = OP_BITOR;
                    break;
                case TOKEN_BITXOR:
                    binary->op_type = OP_BITXOR;
                    break;
                case TOKEN_SHL:
                    binary->op_type = OP_SHL;
                    break;
                case TOKEN_SHR:
                    binary->op_type = OP_SHR;
                    break;

                // Assignment operators
                case TOKEN_EQ:
                    binary->op_type = OP_ASSIGN;
                    break;
                case TOKEN_PLUS_ASSIGN:
                    binary->op_type = OP_PLUS_ASSIGN;
                    break;
                case TOKEN_MINUS_ASSIGN:
                    binary->op_type = OP_MINUS_ASSIGN;
                    break;
                case TOKEN_MUL_ASSIGN:
                    binary->op_type = OP_MUL_ASSIGN;
                    break;
                case TOKEN_DIV_ASSIGN:
                    binary->op_type = OP_DIV_ASSIGN;
                    break;
                case TOKEN_MOD_ASSIGN:
                    binary->op_type = OP_MOD_ASSIGN;
                    break;
                case TOKEN_BITAND_ASSIGN:
                    binary->op_type = OP_BITAND_ASSIGN;
                    break;
                case TOKEN_BITOR_ASSIGN:
                    binary->op_type = OP_BITOR_ASSIGN;
                    break;
                case TOKEN_BITXOR_ASSIGN:
                    binary->op_type = OP_BITXOR_ASSIGN;
                    break;
                case TOKEN_SHL_ASSIGN:
                    binary->op_type = OP_SHL_ASSIGN;
                    break;
                case TOKEN_SHR_ASSIGN:
                    binary->op_type = OP_SHR_ASSIGN;
                    break;
                default:
                    parser_error_at_current(parser, "Unknown binary operator");
                    return NULL;
            }

            binary->value = strdup(op.value);
            add_child(binary, left);
            add_child(binary, right);
            left = binary;
        }
    }
    if (match_token(parser, TOKEN_SEMICOLON)) consume_token(parser);
    return left;
}

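// Shapes produced above (a sketch; identifiers are illustrative only):
//
//   i++            -> NODE_POSTFIX_EXPR("++") wrapping i
//   x as float     -> NODE_CAST_EXPR(x, NODE_TYPE_IDENTIFIER "float")
//   c ? a : b      -> NODE_TERNARY_EXPR(c, a, b)
//   f(1, n=2)      -> NODE_CALL_EXPR(f, NODE_PARAM_LIST[1, NODE_NAMED_ARG(n, 2)])
//   obj.field[i]   -> NODE_INDEX_EXPR(NODE_MEMBER_EXPR(obj, field), i)
//   a - b - c      -> ((a - b) - c), because the right operand is parsed at
//                     new_precedence + 1, so equal-precedence operators
//                     associate to the left
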
ASTNode* parse_primary(Parser* parser) {
    ASTNode* node = NULL;
    if (match_token(parser, TOKEN_INT_LITERAL)) {
        Token token = consume_token(parser);
        node = create_node(NODE_INT_LITERAL);
        set_node_position(node, token.line, token.column);
        node->value = strdup(token.value);
    } else if (match_token(parser, TOKEN_FLOAT_LITERAL)) {
        Token token = consume_token(parser);
        node = create_node(NODE_FLOAT_LITERAL);
        set_node_position(node, token.line, token.column);
        node->value = strdup(token.value);
    } else if (match_token(parser, TOKEN_STRING_LITERAL)) {
        Token token = consume_token(parser);
        node = create_node(NODE_STRING_LITERAL);
        set_node_position(node, token.line, token.column);
        node->value = strdup(token.value);
    } else if (match_token(parser, TOKEN_IDENTIFIER)) {
        Token token = consume_token(parser);
        node = create_node(NODE_IDENTIFIER);
        set_node_position(node, token.line, token.column);
        node->value = strdup(token.value);
    } else if (match_token(parser, TOKEN_TRUE) || match_token(parser, TOKEN_FALSE)) {
        Token token = consume_token(parser);
        node = create_node(NODE_BOOL_LITERAL);
        set_node_position(node, token.line, token.column);
        node->value = strdup(token.value);
    } else if (match_token(parser, TOKEN_LPAREN)) {
        consume_token(parser);
        ASTNode* expr = parse_expression(parser);
        expect_token(parser, TOKEN_RPAREN);
        return expr;
    } else if (match_token(parser, TOKEN_LBRACKET)) {
        return parse_array_literal(parser);
    } else if (match_token(parser, TOKEN_LBRACE)) {
        return parse_key_value_literal(parser);
    } else {
        char error_message[256];
        snprintf(error_message, sizeof(error_message), "unexpected token '%s'",
                 token_type_to_string(parser->current_token.type));
        parser_error_at_current(parser, error_message);
    }
    return node;
}

// Parse a whole program
ASTNode* parse_program(Parser* parser) {
    ASTNode* program = create_node(NODE_PROGRAM);
    set_node_position(program, parser->lexer->line, parser->lexer->column);
    while (!match_token(parser, TOKEN_EOF)) {
        ASTNode* stmt = parse_statement(parser);
        if (stmt != NULL) {
            add_child(program, stmt);
        }
    }
    return program;
}
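
/*
 * Typical driver (a sketch, not code from this project): parser_init() takes an
 * already-initialized Lexer plus a file name, parse_program() returns the AST
 * root, and parser_free() releases the error list. How the Lexer is set up and
 * how the AST is printed or freed depends on lexer.h / ast_printer.h; the
 * lexer_init(), print_ast() and free_node() calls below are assumed names used
 * only for illustration.
 *
 *     Lexer lexer;
 *     lexer_init(&lexer, source_text);        // hypothetical lexer setup
 *
 *     Parser parser;
 *     parser_init(&parser, &lexer, strdup("example.src"));
 *
 *     ASTNode* program = parse_program(&parser);
 *     if (!parser.had_error) {
 *         print_ast(program, 0);              // hypothetical ast_printer.h call
 *     }
 *
 *     free_node(program);                     // hypothetical AST cleanup
 *     parser_free(&parser);
 */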