Rename the parser's `result : *Compile_Result` to `ctx : *Compiler_Context` for clarity. Also fix builtin node construction: compute a builtin token's index from the buffer position before its text is appended, emit "," (not ";") as the text of TOKEN_COMMA in builtin function argument lists, and indent builtin struct fields.

This commit is contained in:
2025-09-10 23:21:34 +02:00
parent ceafd197f5
commit 9461fe626f
7 changed files with 374 additions and 361 deletions

View File

@@ -16,7 +16,7 @@ Parse_State :: struct {
current_token_index : int;
result : *Compile_Result;
ctx : *Compiler_Context;
}
////////////////////////////
@@ -119,7 +119,7 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
error : Compiler_Message;
error.message_kind = .Error;
error.message = message;
error.path = parse_state.result.file.path;
error.path = parse_state.ctx.file.path;
source_location : Source_Range;
source_location.begin = token;
@@ -134,8 +134,8 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
source_location.end = parse_state.current;
array_add(*error.source_locations, source_location);
parse_state.result.had_error = true;
array_add(*parse_state.result.messages, error);
parse_state.ctx.had_error = true;
array_add(*parse_state.ctx.messages, error);
rewind_to_snapshot(parse_state, snap);
}
@@ -382,7 +382,7 @@ make_node :: (nodes : *[..]AST_Node, kind : AST_Kind, allocator : Allocator) ->
}
// Convenience overload: allocate a new AST node of the given kind using the
// node pool and allocator carried on the parser's compiler context.
// (Fix: the stale pre-rename `parse_state.result.…` return line was left in
// alongside the corrected one, leaving a duplicate, unreachable statement;
// only the `ctx` version is kept.)
make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
    return make_node(*parse_state.ctx.nodes, kind, parse_state.ctx.allocator);
}
// new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
@@ -401,7 +401,8 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
if buffer {
start := buffer.count;
}
}
tok.column = col.*;
print_to_builder(builder, "%", text);
buffer = get_current_buffer(builder);
@@ -416,8 +417,7 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
}
}
tok.column = col.*;
tok.index = buffer.count;
tok.index = buffer.count - text.count;
tok.length = text.count;
tok.builtin = true;
@@ -426,51 +426,53 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
return *(tokens.*)[tokens.count - 1];
}
new_builtin_struct_node :: (result : *Compile_Result, name : string, members : []Arg, allocator : Allocator) -> *AST_Node {
new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg, allocator : Allocator) -> *AST_Node {
sc := get_scratch(allocator);
defer scratch_end(sc);
builder : String_Builder;
builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
node := make_node(*result.nodes, .Struct, allocator);
node := make_node(*ctx.nodes, .Struct, allocator);
source_location : Source_Range;
col := 0;
line := 0;
tok_index := result.tokens.count;
tok_index := ctx.tokens.count;
ident_token := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
ident_token.ident_value = name;
source_location.begin = ident_token;
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_STRUCT, "struct", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_STRUCT, "struct", *col, *line);
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_LEFTBRACE, "{", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTBRACE, "{", *col, *line);
append(*builder, "\n");
line += 1;
col = 0;
field_list := make_node(*result.nodes, .FieldList, allocator);
field_list := make_node(*ctx.nodes, .FieldList, allocator);
add_child(node, field_list);
for member : members {
field := make_node(*result.nodes, .Field, allocator);
field := make_node(*ctx.nodes, .Field, allocator);
field_source_loc : Source_Range;
field_ident := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
indent(*builder, 1);
field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
field_source_loc.begin = field_ident;
field.token = field_ident;
field.name = member.name;
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
semicolon_tok := make_builtin_token(*result.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
append(*builder, "\n");
col = 0;
line += 1;
@@ -481,7 +483,7 @@ new_builtin_struct_node :: (result : *Compile_Result, name : string, members : [
add_child(field_list, field);
}
brace_token := make_builtin_token(*result.tokens, *builder, .TOKEN_RIGHTBRACE, "}", *col, *line);
brace_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTBRACE, "}", *col, *line);
append(*builder, "\n");
source_location.end = brace_token;
@@ -491,8 +493,8 @@ new_builtin_struct_node :: (result : *Compile_Result, name : string, members : [
source_location.begin.source = *source.data[source_location.begin.column];
source_location.end.source = *source.data[source_location.end.column];
for i : tok_index..result.tokens.count - 1 {
tok := result.tokens[i];
for i : tok_index..ctx.tokens.count - 1 {
tok := ctx.tokens[i];
tok.source = *source.data[tok.column];
}
@@ -502,52 +504,48 @@ new_builtin_struct_node :: (result : *Compile_Result, name : string, members : [
// field.source_location.main_token.source = *source.data[tok.column];
}
print_from_source_location(source_location, temp);
node.source_location = source_location;
return node;
}
new_builtin_function_node :: (result : *Compile_Result, name : string, members : []Arg, return_var : Arg, allocator : Allocator) -> *AST_Node {
new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg, allocator : Allocator) -> *AST_Node {
sc := get_scratch(allocator);
defer scratch_end(sc);
builder : String_Builder;
builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
node := make_node(*result.nodes, .Function, allocator);
node := make_node(*ctx.nodes, .Function, allocator);
source_location : Source_Range;
col := 0;
line := 0;
tok_index := result.tokens.count;
tok_index := ctx.tokens.count;
ident_token := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
source_location.begin = ident_token;
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_LEFTPAREN, "(", *col, *line);
field_list := make_node(*result.nodes, .FieldList, allocator);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTPAREN, "(", *col, *line);
field_list := make_node(*ctx.nodes, .FieldList, allocator);
add_child(node, field_list);
for member : members {
field := make_node(*result.nodes, .Field, allocator);
field := make_node(*ctx.nodes, .Field, allocator);
field_source_loc : Source_Range;
field_ident := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
field_source_loc.begin = field_ident;
field.token = field_ident;
field.name = member.name;
append(*builder, " ");
make_builtin_token(*result.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
append(*builder, " ");
type_tok := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
// field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
type_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
field_source_loc.begin = type_tok;
field.token = type_tok;
if it_index < members.count - 1 {
make_builtin_token(*result.tokens, *builder, .TOKEN_COMMA, ";", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_COMMA, ",", *col, *line);
append(*builder, " ");
}
field_source_loc.end = type_tok;
@@ -556,8 +554,8 @@ new_builtin_function_node :: (result : *Compile_Result, name : string, members :
add_child(field_list, field);
}
make_builtin_token(*result.tokens, *builder, .TOKEN_RIGHTPAREN, ")", *col, *line);
semicolon_tok := make_builtin_token(*result.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTPAREN, ")", *col, *line);
semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
source_location.end = semicolon_tok;
@@ -566,8 +564,8 @@ new_builtin_function_node :: (result : *Compile_Result, name : string, members :
source_location.begin.source = *source.data[source_location.begin.column];
source_location.end.source = *source.data[source_location.end.column];
for i : tok_index..result.tokens.count - 1 {
tok := result.tokens[i];
for i : tok_index..ctx.tokens.count - 1 {
tok := ctx.tokens[i];
tok.source = *source.data[tok.column];
}
@@ -577,7 +575,7 @@ new_builtin_function_node :: (result : *Compile_Result, name : string, members :
// field.source_location.main_token.source = *source.data[tok.column];
}
print_from_source_location(source_location, temp);
node.source_location = source_location;
return node;
}
@@ -617,10 +615,10 @@ advance :: (parse_state : *Parse_State) {
parse_state.previous = parse_state.current;
while true {
if parse_state.current_token_index >= parse_state.result.tokens.count {
if parse_state.current_token_index >= parse_state.ctx.tokens.count {
break;
}
parse_state.current = *parse_state.result.tokens[parse_state.current_token_index];
parse_state.current = *parse_state.ctx.tokens[parse_state.current_token_index];
parse_state.current_token_index += 1;
if parse_state.current.kind != .TOKEN_ERROR break;
@@ -653,7 +651,7 @@ check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {
//nb - Checks if the next (not-yet-consumed) token is of a certain kind,
// by peeking at the token stream on the compiler context without advancing.
// (Fix: the stale pre-rename `parse_state.result.…` return line was left in
// alongside the corrected one, leaving a duplicate, unreachable statement;
// only the `ctx` version is kept.)
check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
    return parse_state.ctx.tokens[parse_state.current_token_index].kind == kind;
}
//nb - Consume a token if
@@ -772,8 +770,8 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
}
array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
identifier := parse_state.result.tokens[parse_state.current_token_index - 3];
left_bracket := parse_state.result.tokens[parse_state.current_token_index - 2];
identifier := parse_state.ctx.tokens[parse_state.current_token_index - 3];
left_bracket := parse_state.ctx.tokens[parse_state.current_token_index - 2];
array_access := make_node(parse_state, .Unary);
array_access.token = left_bracket;
@@ -864,13 +862,13 @@ directive :: (state : *Parse_State) -> *AST_Node {
// advance(state);
// result : Compile_Result;
// result.allocator = state.result.allocator;
// result.environment = state.result.environment;
// result : Compiler_Context;
// ctx.allocator = state.ctx.allocator;
// ctx.environment = state.ctx.environment;
// result.file = make_file(*result, path);
// ctx.file = make_file(*result, path);
// if result.file.source.count == 0 {
// if ctx.file.source.count == 0 {
// unable_to_open_file(state, path, path_tok);
// advance_to_sync_point(state);
// advance(state);
@@ -881,21 +879,21 @@ directive :: (state : *Parse_State) -> *AST_Node {
// lex(*result);
// count := state.result.tokens..count;
// count := state.ctx.tokens..count;
// current_idx := state.current_token_index;
// result_count := result.tokens..count;
// result_count := ctx.tokens..count;
// // state.result.tokens..count -= 1;
// array_resize(*state.result.tokens., count + result_count - 1);
// // state.ctx.tokens..count -= 1;
// array_resize(*state.ctx.tokens., count + result_count - 1);
// memcpy(*state.result.tokens[current_idx + result_count - 1], *state.result.tokens[current_idx], size_of(Token) * (count - current_idx));
// memcpy(*state.ctx.tokens[current_idx + result_count - 1], *state.ctx.tokens[current_idx], size_of(Token) * (count - current_idx));
// for *tok : result.tokens. {
// for *tok : ctx.tokens. {
// if tok.kind == .TOKEN_EOF {
// break;
// }
// tok.builtin = true;
// state.result.tokens[it_index] = tok.*;
// state.ctx.tokens[it_index] = tok.*;
// }
}
}
@@ -1041,7 +1039,7 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
node.array_field = true;
} else {
if !check(parse_state, .TOKEN_ASSIGN) {
internal_error_message(*parse_state.result.messages, "Unimplemented error message.", parse_state.result.file.path);
internal_error_message(*parse_state.ctx.messages, "Unimplemented error message.", parse_state.ctx.file.path);
return node;
}
// missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");
@@ -1085,8 +1083,8 @@ argument_list :: (parse_state : *Parse_State) -> *AST_Node {
source_location.main_token = parse_state.current;
error_before := parse_state.result.had_error;
parse_state.result.had_error = false;
error_before := parse_state.ctx.had_error;
parse_state.ctx.had_error = false;
while !check(parse_state, .TOKEN_RIGHTPAREN) {
arg := expression(parse_state);
@@ -1099,12 +1097,12 @@ argument_list :: (parse_state : *Parse_State) -> *AST_Node {
if check(parse_state, .TOKEN_RIGHTPAREN) break;
consume(parse_state, .TOKEN_COMMA, "Expect ',' after function argument.");
if parse_state.result.had_error {
if parse_state.ctx.had_error {
break;
}
}
parse_state.result.had_error = error_before || parse_state.result.had_error;
parse_state.ctx.had_error = error_before || parse_state.ctx.had_error;
consume(parse_state, .TOKEN_RIGHTPAREN, "Expect ')' after function call.");
@@ -1581,8 +1579,8 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
return decl_node;
}
parse :: (result : *Compile_Result, allocator := temp) {
if result.had_error {
parse :: (ctx : *Compiler_Context, allocator := temp) {
if ctx.had_error {
return;
}
@@ -1593,17 +1591,17 @@ parse :: (result : *Compile_Result, allocator := temp) {
defer clear_context_allocators();
parse_state : Parse_State;
result.nodes.allocator = result.allocator;
array_reserve(*result.nodes, 4096);
ctx.nodes.allocator = ctx.allocator;
array_reserve(*ctx.nodes, 4096);
parse_state.current_token_index = 0;
parse_state.result = result;
parse_state.ctx = ctx;
advance(*parse_state);
if !match(*parse_state, .TOKEN_EOF) {
parse_state.result.root = make_node(*parse_state, .Program);
array_reserve(*parse_state.result.root.children, 1024);
program := parse_state.result.root;
parse_state.ctx.root = make_node(*parse_state, .Program);
array_reserve(*parse_state.ctx.root.children, 1024);
program := parse_state.ctx.root;
while !check(*parse_state, .TOKEN_EOF) {
decl := declaration(*parse_state);