Ifdefs, moved semantic to check, fixed error reporting for builtins
Parsing.jai: 168 additions, 168 deletions
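Summary of the hunks below (the commit itself carries only the one-line message above): the parser's error reporters now hand the compiler context to print_from_source_location, which is presumably what fixes location printing for builtin tokens that have no backing source file; make_builtin_token and the builtin struct/function builders stop threading a String_Builder around and bake spacing directly into each token's text; #if directives record a proper Source_Range and accept an else branch (including else #if chains); and the entry-point return-value check is left commented out in the parser, matching the title's "moved semantic to check". A minimal before/after of the reporting change, lifted from the hunks below:

    // Before: the location printer had no access to the compiler context.
    print_to_builder(*builder, "%\n", print_from_source_location(location));

    // After: the compiler context is passed explicitly -- presumably the
    // "fixed error reporting for builtins" part of the commit title.
    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));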
@@ -186,7 +186,7 @@ unexpected_token :: (state : *Parse_State, token : Token, message : string) {

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));

     indent(*builder, 1);
     print_token_pointer(*builder, token);
@@ -207,7 +207,7 @@ else_if_without_if :: (state : *Parse_State) {

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));

     indent(*builder, 1);
     print_token_pointer(*builder, token);
@@ -229,7 +229,7 @@ else_without_if :: (state : *Parse_State) {

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));

     indent(*builder, 1);
     print_token_pointer(*builder, token);
@@ -249,7 +249,7 @@ unable_to_parse_statement :: (state : *Parse_State, token : Token, message : str

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));


     indent(*builder, 1);
@@ -269,7 +269,7 @@ expected_expression :: (state : *Parse_State, token : Token, message : string) {

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));

     indent(*builder, 1);
     print_token_pointer(*builder, token);
@@ -288,7 +288,7 @@ missing_type_specifier :: (state : *Parse_State, token : Token, message : string

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
     indent(*builder, 1);

     loc := location.begin;
@@ -312,7 +312,7 @@ empty_block :: (state : *Parse_State, token : Token, message : string) {

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
     indent(*builder, 1);

     loc := location.begin;
@@ -336,7 +336,26 @@ unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {

     indent(*builder, 1);
     cyan(*builder);
-    print_to_builder(*builder, "%\n", print_from_source_location(location));
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
+    indent(*builder, 1);
+
+    loc := location.begin;
+    print_token_pointer(*builder, loc);
+
+    final_message := builder_to_string(*builder);
+    record_error(state, token, final_message, false);
+}
+
+entry_point_requires_return_value :: (state : *Parse_State, token : Token) {
+    builder : String_Builder;
+    init_string_builder(*builder,, temp);
+
+    print_to_builder(*builder, "Entry point '%' requires return value\n\n", token.ident_value);
+
+    location := generate_source_location_from_token(state, token);
+    indent(*builder, 1);
+    cyan(*builder);
+    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
     indent(*builder, 1);

     loc := location.begin;
@@ -383,28 +402,13 @@ make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
     return make_node(*parse_state.ctx.nodes, kind);
 }

-// new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
-// node := make_node(parse_state, kind);
-// node.builtin = true;
-// return node;
-// }

-make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : Token_Kind, text : string, col : *int, line : *int) -> *Token {
+make_builtin_token :: (tokens : *[..]Token, kind : Token_Kind, text : string, col : *int, line : *int) -> *Token {
     tok : Token;
     tok.kind = kind;

-    start := 0;
-
-    buffer := get_current_buffer(builder);
-
-    if buffer {
-        start := buffer.count;
-    }
     tok.column = col.*;

-    print_to_builder(builder, "%", text);
-    buffer = get_current_buffer(builder);
-    end := buffer.count;

     for c : text {
         if c == #char "\n" {
@@ -415,9 +419,11 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
         }
     }

-    tok.index = buffer.count - text.count;
+    tok.index = tokens.count;
     tok.length = text.count;
     tok.builtin = true;
+    tok.source = text.data;
+    tok.ident_value = text;

     array_add(tokens, tok);

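With the builder parameter gone, a builtin token is self-describing: its index is its slot in the token array rather than an offset into a generated source string, and its source pointer is simply the text it was created from. A hedged sketch of one call and the fields the new version fills in ("float4" is an invented example value):

    col := 0;
    line := 0;
    // Invented example: one builtin identifier token for the text "float4".
    tok := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, "float4", *col, *line);
    // Per the hunk above, the returned token describes itself without a builder:
    //   tok.index       -- index into ctx.tokens, not into a built-up source string
    //   tok.length      -- text.count
    //   tok.source      -- text.data
    //   tok.ident_value -- text
    //   tok.builtin     -- true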
@@ -427,9 +433,6 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
 new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg) -> *AST_Node {
     sc := get_scratch(context.allocator);
     defer scratch_end(sc);
-    builder : String_Builder;
-    builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
-
     node := make_node(*ctx.nodes, .Struct);

     source_location : Source_Range;
@@ -439,17 +442,13 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []

     tok_index := ctx.tokens.count;

-    ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
+    ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
     ident_token.ident_value = name;
     source_location.begin = ident_token;

-    append(*builder, " ");
-    make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
-    append(*builder, " ");
-    make_builtin_token(*ctx.tokens, *builder, .TOKEN_STRUCT, "struct", *col, *line);
-    append(*builder, " ");
-    make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTBRACE, "{", *col, *line);
-    append(*builder, "\n");
+    make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
+    make_builtin_token(*ctx.tokens, .TOKEN_STRUCT, "struct ", *col, *line);
+    make_builtin_token(*ctx.tokens, .TOKEN_LEFTBRACE, "{\n\t", *col, *line);
     line += 1;
     col = 0;

@@ -460,18 +459,14 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
         field := make_node(*ctx.nodes, .Field);
         field_source_loc : Source_Range;

-        indent(*builder, 1);
-        field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
+        field_ident := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.name, *col, *line);
         field_source_loc.begin = field_ident;
         field.token = field_ident;
         field.name = member.name;

-        append(*builder, " ");
-        make_builtin_token(*ctx.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
-        append(*builder, " ");
-        make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
-        semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
-        append(*builder, "\n");
+        make_builtin_token(*ctx.tokens, .TOKEN_COLON, ": ", *col, *line);
+        make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
+        semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);
         col = 0;
         line += 1;

@@ -481,26 +476,9 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
         add_child(field_list, field);
     }

-    brace_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTBRACE, "}", *col, *line);
-    append(*builder, "\n");
+    brace_token := make_builtin_token(*ctx.tokens, .TOKEN_RIGHTBRACE, "\n}", *col, *line);

     source_location.end = brace_token;
-
-    source := builder_to_string(*builder,, context.allocator);
-
-    source_location.begin.source = *source.data[source_location.begin.column];
-    source_location.end.source = *source.data[source_location.end.column];
-
-    for i : tok_index..ctx.tokens.count - 1 {
-        tok := ctx.tokens[i];
-        tok.source = *source.data[tok.column];
-    }
-
-    for field : field_list.children {
-        field.source_location.begin.source = *source.data[field.source_location.begin.column];
-        field.source_location.end.source = *source.data[field.source_location.end.column];
-        // field.source_location.main_token.source = *source.data[tok.column];
-    }

     node.source_location = source_location;

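Because every builtin token now carries its own text, with separators such as " :: ", "struct " and "{\n\t" baked in, the generated-source String_Builder and the back-patching of token.source offsets above could be deleted outright; if display text for a builtin declaration is ever needed, it can be rebuilt by concatenating token texts. A hedged sketch of such a helper (the procedure name is invented; the Token fields are the ones used in this diff):

    // Hypothetical helper, not part of this commit: rebuild display text for a
    // builtin declaration from a token range, e.g. for diagnostics.
    builtin_tokens_to_string :: (tokens : []Token, first : int, last : int) -> string {
        builder : String_Builder;
        init_string_builder(*builder);
        for i : first..last {
            tok := tokens[i];
            text : string;
            text.data  = tok.source;   // the literal text handed to make_builtin_token
            text.count = tok.length;
            append(*builder, text);
        }
        return builder_to_string(*builder);
    }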
@@ -510,8 +488,6 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
 new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg) -> *AST_Node {
     sc := get_scratch(context.allocator);
     defer scratch_end(sc);
-    builder : String_Builder;
-    builder.allocator = sc.allocator; // I want to find a good way to use scratch here...

     node := make_node(*ctx.nodes, .Function);

@@ -522,13 +498,11 @@ new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members :

     tok_index := ctx.tokens.count;

-    ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
+    ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
     source_location.begin = ident_token;

-    append(*builder, " ");
-    make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
-    append(*builder, " ");
-    make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTPAREN, "(", *col, *line);
+    make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
+    make_builtin_token(*ctx.tokens, .TOKEN_LEFTPAREN, "(", *col, *line);
     field_list := make_node(*ctx.nodes, .FieldList);
     add_child(node, field_list);

@@ -536,14 +510,12 @@ new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members :
         field := make_node(*ctx.nodes, .Field);
         field_source_loc : Source_Range;

-        // field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
-        type_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
+        type_tok := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
         field_source_loc.begin = type_tok;
         field.token = type_tok;

         if it_index < members.count - 1 {
-            make_builtin_token(*ctx.tokens, *builder, .TOKEN_COMMA, ",", *col, *line);
-            append(*builder, " ");
+            make_builtin_token(*ctx.tokens, .TOKEN_COMMA, ", ", *col, *line);
         }

         field_source_loc.end = type_tok;
@@ -552,26 +524,10 @@ new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members :
         add_child(field_list, field);
     }

-    make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTPAREN, ")", *col, *line);
-    semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
+    make_builtin_token(*ctx.tokens, .TOKEN_RIGHTPAREN, ")", *col, *line);
+    semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);

     source_location.end = semicolon_tok;
-
-    source := builder_to_string(*builder,, context.allocator);
-
-    source_location.begin.source = *source.data[source_location.begin.column];
-    source_location.end.source = *source.data[source_location.end.column];
-
-    for i : tok_index..ctx.tokens.count - 1 {
-        tok := ctx.tokens[i];
-        tok.source = *source.data[tok.column];
-    }
-
-    for field : field_list.children {
-        field.source_location.begin.source = *source.data[field.source_location.begin.column];
-        field.source_location.end.source = *source.data[field.source_location.end.column];
-        // field.source_location.main_token.source = *source.data[tok.column];
-    }

     node.source_location = source_location;

@@ -838,16 +794,28 @@ directive :: (state : *Parse_State) -> *AST_Node {
     if_directive := make_node(state, .If_Directive);

+    source_location : Source_Range;
+    // source_location.begin = state.previous;
+    if state.previous {
+        source_location.begin = state.previous;
+    } else {
+        source_location.begin = state.current;
+    }
-
     advance(state);

     cond := expression(state);
     add_child(if_directive, cond);

+    source_location.end = state.previous;
     advance_to_sync_point(state);

     if_body := block(state);
     add_child(if_directive, if_body);

+    if match(state, .TOKEN_ELSE) {
+        else_node := else_statement(state);
+        add_child(if_directive, else_node);
+    }

+    if_directive.source_location = source_location;

     return if_directive;
@@ -1256,13 +1224,15 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
 else_statement :: (parse_state : *Parse_State) -> *AST_Node {
     if check(parse_state, .TOKEN_IF) {
         return statement(parse_state);
+    } else if check(parse_state, .TOKEN_DIRECTIVE) && parse_state.current.ident_value == "if" {
+        return directive(parse_state);
     }
     return block(parse_state);
 }

 block :: (parse_state : *Parse_State) -> *AST_Node {
     node : *AST_Node = make_node(parse_state, .Block);
-    array_reserve(*node.children, 1024);
+    array_reserve(*node.children, 32);

     source_location : Source_Range;

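Together with the directive() changes above, routing else_statement through directive() when the next token is the #if directive appears to be what lets an else branch chain straight into another #if. The new branch repeats a two-part test (directive token plus its name); a tiny helper along these lines, not part of this diff, would express the same check:

    // Hypothetical helper: true when the upcoming token is a directive with the
    // given name, e.g. check_directive(parse_state, "if").
    check_directive :: (parse_state : *Parse_State, name : string) -> bool {
        return check(parse_state, .TOKEN_DIRECTIVE) && parse_state.current.ident_value == name;
    }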
@@ -1361,10 +1331,22 @@ function_declaration :: (parse_state : *Parse_State, identifier_token : *Token,
         case .Vertex; {
             node.vertex_entry_point = true;
             name = sprint("vs_%", function_name_token.ident_value);
+
+            // if return_type_token.kind == .TOKEN_INVALID {
+            // entry_point_requires_return_value(parse_state, function_name_token);
+            // advance_to_sync_point(parse_state);
+            // return error_node(parse_state, "");
+            // }
         }
         case .Pixel; {
             node.pixel_entry_point = true;
             name = sprint("ps_%", function_name_token.ident_value);
+
+            // if return_type_token.kind == .TOKEN_INVALID {
+            // entry_point_requires_return_value(parse_state, function_name_token);
+            // advance_to_sync_point(parse_state);
+            // return error_node(parse_state, "");
+            // }
         }
     }

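The return-value requirement for vertex/pixel entry points is left commented out here rather than enforced while parsing; per the commit title ("moved semantic to check"), that validation presumably belongs to a later check pass. A very rough sketch of what such a pass could look like; only vertex_entry_point / pixel_entry_point come from this diff, everything else (the has_return_value field, the reporting call) is an assumption:

    // Hypothetical check-pass sketch, not code from this repository.
    check_entry_point_returns :: (ctx : *Compiler_Context) {
        for * node : ctx.nodes {
            if !(node.vertex_entry_point || node.pixel_entry_point)  continue;
            if node.has_return_value  continue;   // assumed AST field
            // Report through the usual error path; the parser side already has
            // entry_point_requires_return_value(state, token) for the message.
            report_entry_point_error(ctx, node);  // assumed reporting hook
        }
    }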