Broke builtins.

2025-09-06 19:58:46 +02:00
parent 11c936ba7f
commit 9cf51a1534
6 changed files with 238 additions and 113 deletions

@@ -368,16 +368,30 @@ advance_to_sync_point :: (parse_state : *Parse_State) {
////////////////////////////
//@nb - Base parsing functions
make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
make_node :: (nodes : *[..]AST_Node, kind : AST_Kind, allocator : Allocator) -> *AST_Node {
node : AST_Node;
node.kind = kind;
node.children.allocator = parse_state.result.allocator;
array_add(*parse_state.result.nodes, node);
node.children.allocator = allocator;
array_add(nodes, node);
return *parse_state.result.nodes[parse_state.result.nodes.count - 1];
return *(nodes.*[nodes.count - 1]);
}
make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
return make_node(*parse_state.result.nodes, kind, parse_state.result.allocator);
}
// new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
// node := make_node(parse_state, kind);
// node.builtin = true;
// return node;
// }
// new_builtin_struct_node :: (nodes : *[..]AST_Node, name : string, member_names : []string, allocator : Allocator) -> *AST_Node {
// }
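// A possible completion of the stubs above (sketch only, not part of this
// commit), routed through the new make_node overload; the extra allocator
// parameter and the `builtin` flag on AST_Node are assumptions taken from the
// commented code, not confirmed API:
// new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind, allocator : Allocator) -> *AST_Node {
//     node := make_node(nodes, kind, allocator);
//     node.builtin = true;
//     return node;
// }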
add_child :: (node : *AST_Node, child : *AST_Node) {
child.parent = node;
array_add(*node.children, child);
@@ -408,10 +422,10 @@ advance :: (parse_state : *Parse_State) {
parse_state.previous = parse_state.current;
while true {
if parse_state.current_token_index >= parse_state.result.tokens.tokens.count {
if parse_state.current_token_index >= parse_state.result.tokens.count {
break;
}
parse_state.current = *parse_state.result.tokens.tokens[parse_state.current_token_index];
parse_state.current = *parse_state.result.tokens[parse_state.current_token_index];
parse_state.current_token_index += 1;
if parse_state.current.kind != .TOKEN_ERROR break;
@@ -444,7 +458,7 @@ check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {
//nb - Checks if the next token is of a certain kind
check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
return parse_state.result.tokens.tokens[parse_state.current_token_index].kind == kind;
return parse_state.result.tokens[parse_state.current_token_index].kind == kind;
}
//nb - Consume a token if
@@ -563,8 +577,8 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
}
array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
identifier := parse_state.result.tokens.tokens[parse_state.current_token_index - 3];
left_bracket := parse_state.result.tokens.tokens[parse_state.current_token_index - 2];
identifier := parse_state.result.tokens[parse_state.current_token_index - 3];
left_bracket := parse_state.result.tokens[parse_state.current_token_index - 2];
array_access := make_node(parse_state, .Unary);
array_access.token = left_bracket;
@@ -650,27 +664,44 @@ directive :: (state : *Parse_State) -> *AST_Node {
advance(state);
if check(state, .TOKEN_STRING) {
path := state.current.string_value;
advance(state);
// path_tok := state.current;
// path := path_tok.string_value;
consume(state, .TOKEN_SEMICOLON, "Expected ';' after #load directive");
result : Compile_Result;
result.allocator = state.result.allocator;
result.environment = state.result.environment;
// advance(state);
result.file = make_file(*result, path);
// result : Compile_Result;
// result.allocator = state.result.allocator;
// result.environment = state.result.environment;
if result.file.source.count == 0 {
unable_to_open_file(state, path, state.previous);
advance_to_sync_point(state);
return null;
}
// result.file = make_file(*result, path);
lex(*result);
// if result.file.source.count == 0 {
// unable_to_open_file(state, path, path_tok);
// advance_to_sync_point(state);
// advance(state);
// return null;
// }
for tok : result.tokens.tokens {
array_add(*state.result.tokens.tokens, tok);
}
// consume(state, .TOKEN_SEMICOLON, "Expected ';' after #load directive");
// lex(*result);
// count := state.result.tokens.count;
// current_idx := state.current_token_index;
// result_count := result.tokens.count;
// // state.result.tokens.count -= 1;
// array_resize(*state.result.tokens, count + result_count - 1);
// memcpy(*state.result.tokens[current_idx + result_count - 1], *state.result.tokens[current_idx], size_of(Token) * (count - current_idx));
// for *tok : result.tokens {
// if tok.kind == .TOKEN_EOF {
// break;
// }
// tok.builtin = true;
// state.result.tokens[it_index] = tok.*;
// }
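// Sketch only (not part of this commit) of the splice outlined in the scratch
// block above: insert the loaded file's tokens, minus its trailing EOF, into
// the parent token stream at the current position and mark them builtin.
// The helper name, the `builtin` flag on Token, and `result.tokens` being the
// flattened [..] Token array are assumptions taken from this change.
// splice_loaded_tokens :: (state : *Parse_State, loaded : []Token) {
//     insert_at    := state.current_token_index;
//     loaded_count := loaded.count - 1;            // drop the loaded file's EOF
//     old_count    := state.result.tokens.count;
//     array_resize(*state.result.tokens, old_count + loaded_count);
//     // Shift the tail of the parent stream forward to make room; the regions
//     // can overlap, so a backwards copy (or memmove) would be safer than memcpy.
//     memcpy(*state.result.tokens[insert_at + loaded_count], *state.result.tokens[insert_at], size_of(Token) * (old_count - insert_at));
//     for i : 0..loaded_count - 1 {
//         tok := loaded[i];
//         tok.builtin = true;
//         state.result.tokens[insert_at + i] = tok;
//     }
// }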
}
}
@@ -1293,6 +1324,7 @@ const_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
}
declaration :: (parse_state : *Parse_State) -> *AST_Node {
skip_statement := false;
decl_node : *AST_Node;
if match(parse_state, .TOKEN_PROPERTIES) {
decl_node = property_block(parse_state);
@@ -1320,6 +1352,7 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
decl_node = call(parse_state, null);
} else if check(parse_state, .TOKEN_DIRECTIVE) {
decl_node = directive(parse_state);
skip_statement = true;
} else if check(parse_state, .TOKEN_IDENTIFIER) {
identifier := parse_state.current;
@@ -1342,7 +1375,7 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
decl_node = error;
}
if !decl_node {
if !decl_node && !skip_statement {
decl_node = statement(parse_state);
}