Move compile result stuff out of specific stages.
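The shape of the refactor: the per-file Compiled_File struct and the per-stage result structs (Parse_Result, Codegen_Result) are folded into one Compile_Result that every stage reads from and writes into. An abridged sketch assembled from the module.jai hunks below (several members, e.g. scope_stack, type_variables, the entry points and the cbuffer data, are elided here):

    Compile_Result :: struct {
        file : Input_File;
        tokens : Token_Stream;
        root : *AST_Node;
        nodes : [..]AST_Node;

        codegen_result_text : string;

        // ... scope_stack, type_variables, entry points, constant buffers ...

        allocator : Allocator;
        arena : Arena;

        had_error : bool;
        messages : [..]Compiler_Message;
    }

Stage state structs (Codegen_State, Parse_State, Semantic_Checker) now hold only a *Compile_Result pointer instead of their own copies of the tokens, AST and type data.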
Codegen.jai
@@ -16,26 +16,26 @@ Output_Language :: enum {
 Codegen_State :: struct {
     path : string;

-    scope_stack : Scope_Stack;
+    // scope_stack : Scope_Stack;
     current_scope : Scope_Handle;

-    type_variables : []Type_Variable;
-    root : *AST_Node;
+    // type_variables : []Type_Variable;
+    // root : *AST_Node;

     output_language : Output_Language;

     builder : String_Builder;

-    result : Codegen_Result;
+    result : *Compile_Result;
 }

-Codegen_Result :: struct {
-    messages : [..]Compiler_Message;
+// Codegen_Result :: struct {
+//     messages : [..]Compiler_Message;

-    had_error : bool;
+    // had_error : bool;

-    result_text : string; // @Incomplete(nb): Result for now, should likely be far more sophisticated.
-}
+    // result_text : string; // @Incomplete(nb): Result for now, should likely be far more sophisticated.
+// }

 Reserved_HLSL_Words :: string.[
     "texture",
@@ -56,10 +56,7 @@ Reserved_GLSL_Words :: string.[
     ""
 ];

-init_codegen_state :: (state : *Codegen_State, file : *Compiled_File, output_language : Output_Language) {
-    state.root = file.ast_root;
-    state.scope_stack = file.scope_stack;
-    state.type_variables = file.type_variables;
+init_codegen_state :: (state : *Codegen_State, result : *Compile_Result, output_language : Output_Language) {
     state.current_scope = cast(Scope_Handle)1;
     state.output_language = output_language;
     init_string_builder(*state.builder);
@@ -105,16 +102,16 @@ hlsl_type_to_string :: (type_variable : Type_Variable) -> string {
 }

 emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
-    find_result := find_symbol(state.scope_stack, node.name, state.current_scope);
+    find_result := find_symbol(state.result.scope_stack, node.name, state.current_scope);

-    field := from_handle(state.type_variables, find_result.type_variable);
+    field := from_handle(state.result.type_variables, find_result.type_variable);

     indent(state, indentation);

     print_to_builder(*state.builder, "% ", hlsl_type_to_string(field));

     if field.struct_field_parent {
-        parent_tv := from_handle(state.type_variables, field.struct_field_parent.type_variable);
+        parent_tv := from_handle(state.result.type_variables, field.struct_field_parent.type_variable);

         if parent_tv.typename == "properties" {
             append(*state.builder, "__PROPERTIES__");
@@ -144,7 +141,7 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {


     for i :0..field.children.count - 1 {
-        child := from_handle(state.type_variables, field.children[i]);
+        child := from_handle(state.result.type_variables, field.children[i]);
         emit_node(state, child.source_node, 0);
     }

@@ -230,7 +227,7 @@ emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 }

 emit_properties :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
-    find_result := find_symbol(state.scope_stack, ifx node.name.count > 0 then node.name else "properties", state.current_scope);
+    find_result := find_symbol(state.result.scope_stack, ifx node.name.count > 0 then node.name else "properties", state.current_scope);

     if !find_result {
         message : Compiler_Message;
@@ -241,7 +238,7 @@ emit_properties :: (state : *Codegen_State, node : *AST_Node, indentation : int)
     }
     assert(find_result != null, "Attempting to generate undeclared properties buffer. This should never happen at this stage.");

-    variable := from_handle(state.type_variables, find_result.type_variable);
+    variable := from_handle(state.result.type_variables, find_result.type_variable);

     print_to_builder(*state.builder, "cbuffer __PROPERTIES : register(b%) \n{\n", variable.resource_index);

@@ -253,7 +250,7 @@ emit_properties :: (state : *Codegen_State, node : *AST_Node, indentation : int)
     for child : node.children {
         if child.kind == .FieldList {
             for field : child.children {
-                tv := from_handle(state.type_variables, field.type_variable);
+                tv := from_handle(state.result.type_variables, field.type_variable);
                 if tv.type == .Sampler || tv.type == .Texture2D {
                     array_add(*resources, field);
                     continue;
@@ -281,7 +278,7 @@ emit_properties :: (state : *Codegen_State, node : *AST_Node, indentation : int)

 emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, emit_body := true) {
     name := get_actual_function_name(node);
-    find_result := find_symbol(state.scope_stack, name, state.current_scope);
+    find_result := find_symbol(state.result.scope_stack, name, state.current_scope);

     assert(find_result != null, "Attempting to generate undeclared function. This should never happen at this stage.");
     if !find_result {
@@ -293,12 +290,12 @@ emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, e
     }

     for func : find_result.functions {
-        function_variable := from_handle(state.type_variables, func.type_variable);
+        function_variable := from_handle(state.result.type_variables, func.type_variable);

         indent(state, indentation);

         if function_variable.return_type_variable {
-            return_variable := from_handle(state.type_variables, function_variable.return_type_variable);
+            return_variable := from_handle(state.result.type_variables, function_variable.return_type_variable);
             print_to_builder(*state.builder, "% ", hlsl_type_to_string(return_variable));
         } else {
             append(*state.builder, "void ");
@@ -438,12 +435,12 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
         case .Variable; {
             indent(*state.builder, indentation);

-            type_var := from_handle(state.type_variables, node.type_variable);
+            type_var := from_handle(state.result.type_variables, node.type_variable);
             is_properties := type_var.typename == "properties";

             if !is_properties {
                 if type_var.struct_field_parent {
-                    parent_tv := from_handle(state.type_variables, type_var.struct_field_parent.type_variable);
+                    parent_tv := from_handle(state.result.type_variables, type_var.struct_field_parent.type_variable);

                     if parent_tv.typename == "properties" {
                         append(*state.builder, "__PROPERTIES__");
@@ -556,7 +553,7 @@ emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
     print_to_builder(*state.builder, "struct %", node.name);

     current_scope := state.current_scope;
-    state.current_scope = from_handle(state.type_variables, node.type_variable).scope;
+    state.current_scope = from_handle(state.result.type_variables, node.type_variable).scope;

     field_list := node.children[0];

@@ -573,11 +570,11 @@ emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 }

 emit_cbuffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
-    variable := from_handle(state.type_variables, node.type_variable);
+    variable := from_handle(state.result.type_variables, node.type_variable);
     print_to_builder(*state.builder, "cbuffer % : register(b%)", variable.name, variable.resource_index);

     current_scope := state.current_scope;
-    state.current_scope = from_handle(state.type_variables, node.type_variable).scope;
+    state.current_scope = from_handle(state.result.type_variables, node.type_variable).scope;

     field_list := node.children[0];

@@ -615,21 +612,20 @@ codegen :: (result : *Compile_Result) {
         return;
     }

-    for *file : result.files {
     state : Codegen_State;
-    init_codegen_state(*state, file, .HLSL);
+    state.result = result;
+    state.current_scope = cast(Scope_Handle)1;
+    state.output_language = .HLSL;
+    init_string_builder(*state.builder);

-    codegen_result := codegen(*state);
-
-    file.codegen_result_text = copy_string(codegen_result.result_text);
-    }
+    codegen(*state);
 }

-codegen :: (state : *Codegen_State) -> Codegen_Result {
+codegen :: (state : *Codegen_State) {
     found_function : bool = false;
     // found_struct : bool = false;

-    // for variable : state.type_variables {
+    // for variable : state.result.type_variables {
     //     if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
     //         if variable.source_node.kind == .Properties continue;
     //         if variable.source_node.kind == .Meta continue;
@@ -642,7 +638,7 @@ codegen :: (state : *Codegen_State) -> Codegen_Result {
     //     append(*state.builder, "\n");
     // }

-    for variable : state.type_variables {
+    for variable : state.result.type_variables {
         if variable.type == .Function && !variable.builtin
             && !variable.source_node.vertex_entry_point && !variable.source_node.pixel_entry_point {
             emit_function(state, variable.source_node, 0, false);
@@ -653,22 +649,24 @@ codegen :: (state : *Codegen_State) -> Codegen_Result {
         append(*state.builder, "\n");
     }

-    for declaration : state.root.children {
+    for declaration : state.result.root.children {
         if declaration.foreign_declaration {
             continue;
         }
         emit_declaration(state, declaration);
     }

-    state.result.result_text = builder_to_string(*state.builder);
-
-    return state.result;
+    state.result.codegen_result_text = builder_to_string(*state.builder);
 }

-codegen :: (file : *Compiled_File, output_language : Output_Language) -> Codegen_Result {
+codegen :: (result : *Compile_Result, output_language : Output_Language) {
     codegen_state : Codegen_State;
-    init_codegen_state(*codegen_state, file, output_language);
-    return codegen(*codegen_state);
+    codegen_state.result = result;
+    codegen_state.current_scope = cast(Scope_Handle)1;
+    codegen_state.output_language = output_language;
+    init_string_builder(*codegen_state.builder);
+
+    codegen(*codegen_state);
 }

 #scope_module

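For reference, both codegen drivers above now write their output into result.codegen_result_text instead of returning a Codegen_Result. A minimal usage sketch, assuming a result that has already been lexed, parsed and checked (the path is a made-up example):

    result : Compile_Result;
    add_file(*result, "example.ink");
    lex(*result);
    parse(*result);
    check(*result);

    codegen(*result, .HLSL); // codegen(*result) takes the .HLSL default path
    print("%\n", result.codegen_result_text);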
@@ -367,6 +367,7 @@ make_token :: (lexer : *Lexer, token_kind : Token_Kind) -> *Token {

 skip_whitespace :: (lexer : *Lexer) {
     while true {
+        if is_at_end(lexer) return;
         c := peek_char(lexer);

         if c == {
@@ -506,21 +507,19 @@ lex :: (result : *Compile_Result) {
         return;
     }

-    for *file : result.files {
     lexer : Lexer;
-    init_lexer_from_string(*lexer, file.file.source);
-    lexer.path = file.file.path;
+    init_lexer_from_string(*lexer, result.file.source);
+    lexer.path = result.file.path;
     token : *Token = scan_next_token(*lexer);
     while token && token.kind != .TOKEN_EOF {
         token = scan_next_token(*lexer);
     }

-    array_copy(*file.tokens.tokens, lexer.result.tokens);
+    array_copy(*result.tokens.tokens, lexer.result.tokens);
     result.had_error |= lexer.result.had_error;

     // @Incomplete(nb): Temporary until we figure out a good way of passing this stuff around
     copy_messages(lexer.result.messages, *result.messages);
-    }
 }

 lex :: (lexer : *Lexer, allocator : Allocator = context.allocator) -> Lexing_Result {

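The doubled access result.tokens.tokens in these lexer (and later parser) hunks is because Token_Stream is a thin wrapper around the token array, per the module.jai hunk further down:

    Token_Stream :: struct {
        tokens : [..]Token;
    }

so result.tokens is the stream and result.tokens.tokens is the backing array that array_copy fills from the per-lexer Lexing_Result.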
Parsing.jai
@@ -11,33 +11,22 @@
 Parse_State :: struct {
     current : *Token;
     previous : *Token;
-    tokens : [..]Token;

     current_token_index : int;

-    node_allocator : Allocator;
-    node_arena : Arena;
-
-    child_allocator : Allocator;
-    child_arena : Arena;
-
-    // had_error : bool;
-
-    path : string;
-
-    result : Parse_Result;
+    result : *Compile_Result;
 }

 ////////////////////////////
 //@nb - Result and error handling
-Parse_Result :: struct {
-    root : *AST_Node;
-    nodes : [..]AST_Node;
+// Parse_Result :: struct {
+//     root : *AST_Node;
+//     nodes : [..]AST_Node;

-    had_error : bool;
+    // had_error : bool;

-    messages : [..]Compiler_Message;
-}
+    // messages : [..]Compiler_Message;
+// }

 Parse_Error_Kind :: enum {
     Parse_Error_Type_Missing;
@@ -129,16 +118,6 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
     return rules;
 }

-init_parse_state :: (parse_state : *Parse_State, tokens : [..]Token, path : string) {
-    parse_state.tokens = tokens;
-    parse_state.path = path;
-    parse_state.node_allocator = make_arena(*parse_state.node_arena);
-    parse_state.child_allocator = make_arena(*parse_state.child_arena);
-    parse_state.result.nodes.allocator = parse_state.node_allocator;
-    array_reserve(*parse_state.result.nodes, 4096);
-    parse_state.current_token_index = 0;
-}
-
 ////////////////////////////
 //@nb - Error handling functions

@@ -147,7 +126,7 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
     error : Compiler_Message;
     error.message_kind = .Error;
     error.message = message;
-    error.path = parse_state.path;
+    error.path = parse_state.result.file.path;

     source_location : Source_Range;
     source_location.begin = token;
@@ -357,7 +336,7 @@ make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
     node : AST_Node;

     node.kind = kind;
-    node.children.allocator = parse_state.child_allocator;
+    node.children.allocator = parse_state.result.allocator;
     array_add(*parse_state.result.nodes, node);

     return *parse_state.result.nodes[parse_state.result.nodes.count - 1];
@@ -393,10 +372,10 @@ advance :: (parse_state : *Parse_State) {
     parse_state.previous = parse_state.current;

     while true {
-        if parse_state.current_token_index >= parse_state.tokens.count {
+        if parse_state.current_token_index >= parse_state.result.tokens.tokens.count {
             break;
         }
-        parse_state.current = *parse_state.tokens[parse_state.current_token_index];
+        parse_state.current = *parse_state.result.tokens.tokens[parse_state.current_token_index];
         parse_state.current_token_index += 1;
         if parse_state.current.kind != .TOKEN_ERROR break;

@@ -429,7 +408,7 @@ check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {

 //nb - Checks if the next token is of a certain kind
 check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
-    return parse_state.tokens[parse_state.current_token_index].kind == kind;
+    return parse_state.result.tokens.tokens[parse_state.current_token_index].kind == kind;
 }

 //nb - Consume a token if
@@ -440,8 +419,13 @@ consume :: (parse_state : *Parse_State, kind : Token_Kind, message : string) {
     }

     token := parse_state.previous;
-    advance(parse_state);
+    advance_to_sync_point(parse_state);
     unexpected_token(parse_state, token, message);
+
+    if parse_state.current.kind == .TOKEN_EOF {
+        return;
+    }
+
+    consume(parse_state, kind, message);
 }

@@ -540,8 +524,8 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
 }

 array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
-    identifier := parse_state.tokens[parse_state.current_token_index - 3];
-    left_bracket := parse_state.tokens[parse_state.current_token_index - 2];
+    identifier := parse_state.result.tokens.tokens[parse_state.current_token_index - 3];
+    left_bracket := parse_state.result.tokens.tokens[parse_state.current_token_index - 2];

     array_access := make_node(parse_state, .Unary);
     array_access.token = left_bracket;
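The reworked consume above no longer blindly advances past a mismatch: it reports the unexpected token, skips to a synchronization point, and retries itself unless it already hit EOF. advance_to_sync_point is not part of this diff; a hypothetical sketch of such a helper, using only token kinds that appear elsewhere in the diff:

    advance_to_sync_point :: (parse_state : *Parse_State) {
        // Assumption: semicolons and closing braces are reasonable resync points.
        while parse_state.current.kind != .TOKEN_EOF
          &&  parse_state.current.kind != .TOKEN_SEMICOLON
          &&  parse_state.current.kind != .TOKEN_RIGHTBRACE {
            advance(parse_state);
        }
    }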
@@ -749,7 +733,7 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
         node.array_field = true;
     } else {
         if !check(parse_state, .TOKEN_ASSIGN) {
-            internal_error_message(*parse_state.result.messages, "Unimplemented error message.", parse_state.path);
+            internal_error_message(*parse_state.result.messages, "Unimplemented error message.", parse_state.result.file.path);
             return node;
         }
         // missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");
@@ -1227,9 +1211,12 @@ parse :: (result : *Compile_Result) {
         return;
     }

-    for *file : result.files {
     parse_state : Parse_State;
-    init_parse_state(*parse_state, file.tokens.tokens, file.file.path);
+    result.nodes.allocator = result.allocator;
+    array_reserve(*result.nodes, 4096);
+    parse_state.current_token_index = 0;
+    parse_state.result = result;

     advance(*parse_state);

     if !match(*parse_state, .TOKEN_EOF) {
@@ -1244,34 +1231,6 @@ parse :: (result : *Compile_Result) {
             }
         }
     }
-
-    //@Incomplete(nb): will this straight copy just work?
-    // Might need to rething how we do this.
-    file.ast_root = parse_state.result.root;
-    file.ast_nodes = parse_state.result.nodes;
-    copy_messages(parse_state.result.messages, *result.messages);
-
-    result.had_error |= parse_state.result.had_error;
-    }
 }
-
-parse :: (parse_state : *Parse_State) -> Parse_Result {
-    advance(parse_state);
-
-    if !match(parse_state, .TOKEN_EOF) {
-        parse_state.result.root = make_node(parse_state, .Program);
-        array_reserve(*parse_state.result.root.children, 1024);
-        program := parse_state.result.root;
-
-        while !check(parse_state, .TOKEN_EOF) {
-            decl := declaration(parse_state);
-            if decl {
-                add_child(program, decl);
-            }
-        }
-    }
-
-    return parse_state.result;
-}

 #load "AST.jai";

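With the old parse(parse_state) -> Parse_Result overload gone, parsing is a single mutating pass over the shared result, and callers no longer copy roots, nodes or messages back by hand:

    parse(*result);
    if result.had_error print("%\n", report_messages(result.messages));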
@@ -140,7 +140,7 @@ Semantic_Checker :: struct {

     current_scope : Scope_Handle;

-    result_file : *Compiled_File;
+    result : *Compile_Result;

     current_buffer_index : u32 = 0;
     current_sampler_index : u32 = 0;
@@ -451,7 +451,7 @@ Attempting to access a field on a primitive type '%'.
     init_string_builder(*builder,, temp);

     variable := from_handle(checker, handle);
-    print_to_builder(*builder, "Attempting to access a field on a primitive type '%'.\n", proper_type_to_string(checker.result_file.type_variables, variable));
+    print_to_builder(*builder, "Attempting to access a field on a primitive type '%'.\n", proper_type_to_string(checker.result.type_variables, variable));

     indent(*builder, 1);
     cyan(*builder);
@@ -512,7 +512,7 @@ if_condition_has_to_be_boolean_type :: (checker : *Semantic_Checker, usage_site
     usage_child := usage_site.children[0];
     usage_loc := usage_child.source_location;

-    print_to_builder(*builder, "% has type %\n", print_from_source_location(*usage_loc), proper_type_to_string(checker.result_file.type_variables, var));
+    print_to_builder(*builder, "% has type %\n", print_from_source_location(*usage_loc), proper_type_to_string(checker.result.type_variables, var));

     message := builder_to_string(*builder,, temp);
     record_error(checker, message, usage_site.source_location, false);
@@ -554,7 +554,7 @@ type_mismatch :: (checker : *Semantic_Checker, usage_site : *AST_Node, expect_no
     indent(*builder, 1);
     print_to_builder(*builder, "expected:\n");
     indent(*builder, 2);
-    proper_type_to_string(*builder, checker.result_file.type_variables, expect_var);
+    proper_type_to_string(*builder, checker.result.type_variables, expect_var);
     append(*builder, "\n");

     // indent(*builder, 2);
@@ -617,10 +617,10 @@ use_scope :: (checker : *Semantic_Checker, handle : Scope_Handle) -> Scope_Handl

 push_scope :: (checker : *Semantic_Checker, name := "", kind : Scope_Kind = .Global) -> *Scope, Scope_Handle {
     new_scope : Scope;
-    array_add(*checker.result_file.scope_stack.stack, new_scope);
+    array_add(*checker.result.scope_stack.stack, new_scope);

-    count := checker.result_file.scope_stack.stack.count;
-    scope := *checker.result_file.scope_stack.stack[count - 1];
+    count := checker.result.scope_stack.stack.count;
+    scope := *checker.result.scope_stack.stack[count - 1];
     scope.parent = checker.current_scope;
     scope.name = name;
     scope.kind = kind;
@@ -628,7 +628,7 @@ push_scope :: (checker : *Semantic_Checker, name := "", kind : Scope_Kind = .Glo
         scope.builtin = true;
     }

-    scope.children.allocator = checker.result_file.scope_stack.allocator;
+    scope.children.allocator = checker.result.scope_stack.allocator;

     if checker.current_scope {
         scope := get_current_scope(checker);
@@ -651,12 +651,12 @@ pop_scope :: (checker : *Semantic_Checker) -> Scope_Handle {
 }

 peek_scope :: (checker : *Semantic_Checker) -> *Scope, Scope_Handle {
-    if checker.result_file.scope_stack.stack.count == 0 {
+    if checker.result.scope_stack.stack.count == 0 {
         return null, 0;
     }

-    count := checker.result_file.scope_stack.stack.count;
-    scope := *checker.result_file.scope_stack.stack[count - 1];
+    count := checker.result.scope_stack.stack.count;
+    scope := *checker.result.scope_stack.stack[count - 1];
     return scope, xx count;
 }

@@ -673,7 +673,7 @@ get_scope :: (scope_stack : Scope_Stack, handle : Scope_Handle) -> *Scope {
 }

 get_scope :: (checker : *Semantic_Checker, handle : Scope_Handle) -> *Scope {
-    return get_scope(*checker.result_file.scope_stack, handle);
+    return get_scope(*checker.result.scope_stack, handle);
 }

 add_symbol_to_scope :: (state : Checker_State, scope_stack : *Scope_Stack, scope_handle : Scope_Handle, name : string, symbol : Defined_Symbol) -> *Defined_Symbol {
@@ -695,8 +695,8 @@ add_symbol_to_scope :: (state : Checker_State, scope_stack : *Scope_Stack, scope

 new_type_variable :: (checker : *Semantic_Checker) -> *Type_Variable, Type_Variable_Handle {
     variable : Type_Variable;
-    handle := cast(Type_Variable_Handle)checker.result_file.type_variables.count + 1;
-    array_add(*checker.result_file.type_variables, variable);
+    handle := cast(Type_Variable_Handle)checker.result.type_variables.count + 1;
+    array_add(*checker.result.type_variables, variable);

     return from_handle(checker, handle), handle;
 }
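new_type_variable makes the handle scheme visible: a Type_Variable_Handle is a 1-based index into result.type_variables, which leaves 0 free as a null handle (the drivers likewise reset current_scope to cast(Scope_Handle)1, the global scope). The array-based from_handle itself is not shown in this diff; given that scheme it is presumably close to this sketch (an assumption, not the actual body):

    from_handle :: (variables : []Type_Variable, handle : Type_Variable_Handle) -> *Type_Variable {
        if handle == 0 return null;    // assumption: 0 acts as the null handle
        return *variables[handle - 1]; // handles are 1-based indices
    }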
@@ -725,10 +725,10 @@ init_semantic_checker :: (checker : *Semantic_Checker, root : *AST_Node, path :
     checker.path = path;

     // @Incomplete(niels): Use other allocator and/or add static array with convenience functions
-    array_reserve(*checker.result_file.type_variables, 2048);
+    array_reserve(*checker.result.type_variables, 2048);

-    checker.result_file.scope_stack.allocator = make_arena(*checker.result_file.scope_stack.arena);
-    array_reserve(*checker.result_file.scope_stack.stack, 256);
+    checker.result.scope_stack.allocator = make_arena(*checker.result.scope_stack.arena);
+    array_reserve(*checker.result.scope_stack.stack, 256);

     global_scope, global_handle := push_scope(checker, kind = .Global);
     array_reserve(*global_scope.children, 2048);
@@ -754,7 +754,7 @@ find_symbol :: (scope_stack : Scope_Stack, name : string, current_scope : Scope_
 }

 find_symbol :: (checker : *Semantic_Checker, name : string, current_scope : Scope_Handle, containing_scope : *Scope_Handle = null) -> *Defined_Symbol {
-    return find_symbol(checker.result_file.scope_stack, name, current_scope, containing_scope);
+    return find_symbol(checker.result.scope_stack, name, current_scope, containing_scope);
 }

 find_symbol :: (name : string, checker : *Semantic_Checker, containing_scope : *Scope_Handle = null) -> *Defined_Symbol {
@@ -767,7 +767,7 @@ from_handle :: (variables : []Type_Variable, handle : Type_Variable_Handle) -> *
 }

 from_handle :: (checker : *Semantic_Checker, handle : Type_Variable_Handle) -> *Type_Variable {
-    return from_handle(checker.result_file.type_variables, handle);
+    return from_handle(checker.result.type_variables, handle);
 }

 proper_type_to_string :: (builder : *String_Builder, variables : []Type_Variable, var : Type_Variable) {
@@ -843,7 +843,6 @@ proper_type_to_string :: (variables : []Type_Variable, var : Type_Variable, allo
     return "______not proper type______";
 }

-
 get_type_from_identifier :: (checker : *Semantic_Checker, scope : Scope_Handle, node : *AST_Node, typename : *string = null) -> Type_Kind {
     type_string := node.token.ident_value;

@@ -872,10 +871,6 @@ get_type_from_identifier :: (checker : *Semantic_Checker, scope : Scope_Handle,
     return .Invalid;
 }

-check_expression :: (node : *AST_Node, checker : *Semantic_Checker) -> Type_Variable_Handle {
-    return 0;
-}
-
 check_block :: (checker : *Semantic_Checker, node : *AST_Node) {
     for child : node.children {
         check_node(checker, child);
@@ -898,7 +893,7 @@ declare_struct :: (checker : *Semantic_Checker, node : *AST_Node, name : string)
         symbol.name = name;
         symbol.source_node = node;
         symbol.type_variable = handle;
-        add_symbol_to_scope(checker.state, *checker.result_file.scope_stack, checker.current_scope, name, symbol);
+        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name, symbol);
     } else {
         symbol_redeclaration(checker, node, find_result);
         return 0;
@@ -933,7 +928,7 @@ declare_properties :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Va
     name := ifx node.name.count == 0 then "properties" else node.name;

     if node.name.count > 0 {
-        checker.result_file.property_name = name;
+        checker.result.property_name = name;
     }
     type_var := declare_struct(checker, node, name);
     var := from_handle(checker, type_var);
@@ -950,7 +945,7 @@ declare_cbuffer :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Varia
     var.type = .CBuffer;
     var.resource_index = checker.current_buffer_index;
     checker.current_buffer_index += 1;
-    array_add(*checker.result_file.constant_buffers, type_var);
+    array_add(*checker.result.constant_buffers, type_var);
     return type_var;
 }

@@ -986,11 +981,11 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
     name_to_check := get_actual_function_name(node);

     if node.vertex_entry_point {
-        checker.result_file.vertex_entry_point.node = node;
+        checker.result.vertex_entry_point.node = node;
     }

     if node.pixel_entry_point {
-        checker.result_file.pixel_entry_point.node = node;
+        checker.result.pixel_entry_point.node = node;
     }
     find_result := find_symbol(checker, name_to_check, checker.current_scope);

@@ -1007,7 +1002,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
         array_reserve(*symbol.functions, 32);
         array_add(*symbol.functions, function);

-        add_symbol_to_scope(checker.state, *checker.result_file.scope_stack, checker.current_scope, name_to_check, symbol);
+        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name_to_check, symbol);
     } else {
         //@Note(niels): This is some ugly code, but it's probably fine for now.
         field_list := node.children[0];
@@ -1244,7 +1239,7 @@ check_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_
         symbol.name = node.name;
         symbol.source_node = node;
         symbol.type_variable = handle;
-        add_symbol_to_scope(checker.state, *checker.result_file.scope_stack, checker.current_scope, node.name, symbol);
+        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, node.name, symbol);
     } else {
         symbol_redeclaration(checker, node, find_result);
         return 0;
@@ -1672,7 +1667,7 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
 // //~ Functions
 // }

-add_hlsl_builtins :: (checker : *Semantic_Checker) {
+add_builtins :: (checker : *Semantic_Checker) {
     source_location := #location().fully_pathed_filename;
     path_array := split(source_location, "/");

@@ -1682,11 +1677,11 @@ add_hlsl_builtins :: (checker : *Semantic_Checker) {
         append(*sb, "/");
     }

-    append(*sb, "hlsl_builtin.ink");
+    append(*sb, "builtins.ink");

     path := builder_to_string(*sb);

-    HLSL_BUILTIN, ok := read_entire_file(path);
+    BUILTIN, ok := read_entire_file(path);

     if !ok {
         messages : [..]Compiler_Message;
@@ -1698,36 +1693,36 @@ add_hlsl_builtins :: (checker : *Semantic_Checker) {

     checker.state = .Adding_Builtins;

-    lexer : Lexer;
-
-    init_lexer_from_string(*lexer, HLSL_BUILTIN);
-    if lexer.result.had_error {
-        print("%\n", report_messages(lexer.result.messages));
-        return;
-    }
+    add_file_from_string(checker.result, BUILTIN);
+
+    lex(checker.result);
+    parse(checker.result);
+    type_check(checker, checker.result.root);

-    lex_result := lex(*lexer,, *temp);
-    if lex_result.had_error {
-        print("%\n", report_messages(lex_result.messages));
-        return;
-    }
+    // lex_result := lex(*lexer,, *temp);
+    // if lex_result.had_error {
+    //     print("%\n", report_messages(lex_result.messages));
+    //     return;
+    // }

-    parse_state : Parse_State;
-    init_parse_state(*parse_state, lex_result.tokens, lexer.path);
+    // parse_state : Parse_State;
+    // init_parse_state(*parse_state, lex_result.tokens, lexer.path);

-    parse_result := parse(*parse_state);
-    if parse_result.had_error {
-        print("%\n", report_messages(parse_result.messages));
-        return;
-    }
+    // parse_result := parse(*parse_state);
+    // if parse_result.had_error {
+    //     print("%\n", report_messages(parse_result.messages));
+    //     return;
+    // }

-    type_check(checker, parse_result.root);
-    if checker.had_error {
-        print("%\n", report_messages(checker.messages));
-        return;
-    }
+    // type_check(checker, parse_result.root);
+    // if checker.had_error {
+    //     print("%\n", report_messages(checker.messages));
+    //     return;
+    // }

-    for *type_var : checker.result_file.type_variables {
+    for *type_var : checker.result.type_variables {
         type_var.builtin = true;
     }

@@ -1743,25 +1738,28 @@ check :: (result : *Compile_Result) {
         return;
     }

-    for *file : result.files {
     checker : Semantic_Checker;

     checker.current_buffer_index = 0;
     checker.current_sampler_index = 0;
     checker.current_texture_index = 0;
-    checker.result_file = file;
+    checker.result = result;
+    file := result.file;
+    root := result.root;
     array_reserve(*checker.messages, 32);

-    init_semantic_checker(*checker, file.ast_root, file.file.path);
+    init_semantic_checker(*checker, result.root, result.file.path);

     // @Performance: Should have this built in stuff done earlier and only once
-    add_hlsl_builtins(*checker);
+    add_builtins(*checker);
+    checker.result.file = file;

-    type_check(*checker, file.ast_root);
+    result.root = root;
+
+    type_check(*checker, result.root);

     result.had_error |= checker.had_error;
     copy_messages(checker.messages, *result.messages);
-    }
 }

 // ===========================================================
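One subtlety in the check hunk above: add_builtins now runs the builtins source through the same add_file_from_string / lex / parse / type_check pipeline, which overwrites result.file and result.root. That is why check snapshots both before the call and restores them afterwards; in isolation the dance looks like this:

    file := result.file;        // user's input file, saved
    root := result.root;        // user's AST root, saved

    add_builtins(*checker);     // clobbers result.file and result.root with builtins.ink

    checker.result.file = file; // restore the user's file
    result.root = root;         // restore the user's AST
    type_check(*checker, result.root);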
@@ -2040,7 +2038,7 @@ pretty_print_symbol_table :: (checker : *Semantic_Checker, allocator : Allocator
     builder : String_Builder;
     init_string_builder(*builder,, allocator);

-    pretty_print_scope(xx checker.current_scope, checker.result_file.scope_stack, checker.result_file.type_variables, *checker.result_file.scope_stack.stack[0], *builder);
+    pretty_print_scope(xx checker.current_scope, checker.result.scope_stack, checker.result.type_variables, *checker.result.scope_stack.stack[0], *builder);

     return builder_to_string(*builder,, allocator);
 }
@@ -2049,11 +2047,8 @@ pretty_print_symbol_table :: (result : *Compile_Result, allocator : Allocator) -
     builder : String_Builder;
     init_string_builder(*builder,, allocator);

-    for *file : result.files {
     current_scope := cast(Scope_Handle)1;
-    pretty_print_scope(current_scope, file.scope_stack, file.type_variables, *file.scope_stack.stack[0], *builder);
-
-    }
+    pretty_print_scope(current_scope, result.scope_stack, result.type_variables, *result.scope_stack.stack[0], *builder);

     return builder_to_string(*builder,, allocator);
Test.jai
@@ -174,7 +174,7 @@ run_codegen_test :: (file_path : string, result : *Compile_Result, output_type :

 run_codegen_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
     result_data : Result;
-    result_data.path = result.files[0].file.path;
+    result_data.path = result.file.path;
     result_text : string;

     codegen(result);
@@ -185,7 +185,7 @@ run_codegen_test :: (result : *Compile_Result, output_type : Output_Type = 0) ->
         return result_data;
     }

-    result_text = result.files[0].codegen_result_text;
+    result_text = result.codegen_result_text;

     if output_type & .StdOut {
         result_data.info_text = result_text;
@@ -193,7 +193,7 @@ run_codegen_test :: (result : *Compile_Result, output_type : Output_Type = 0) ->
         return result_data;
     }

-    golden_path := get_golden_path(result.files[0].file.path, .Codegen);
+    golden_path := get_golden_path(result.file.path, .Codegen);
     do_golden_comparison(golden_path, result_text, *result_data, output_type);
     return result_data;
 }
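Since a Compile_Result now carries exactly one file, the test helpers drop the files[0] indirection throughout. A typical stage-test sequence now mirrors run_test_new's body (the path here is hypothetical):

    compile_result : Compile_Result;
    add_file(*compile_result, "tests/some_case.ink");

    lex(*compile_result);
    parse(*compile_result);
    check(*compile_result);
    result := run_codegen_test(*compile_result, .StdOut);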
@@ -225,7 +225,7 @@ run_lexer_test :: (file_path : string, result : *Compile_Result, output_type : O
         result_data.type = .Failed;
         result_text = report_messages(result.messages);
     } else {
-        result_text = pretty_print_tokens(result.files[0].tokens.tokens, *temp);
+        result_text = pretty_print_tokens(result.tokens.tokens, *temp);
     }

     if output_type & .StdOut {
@@ -259,14 +259,14 @@ run_parser_test :: (file_path : string, result : *Compile_Result, output_type :
 run_parser_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
     parse(result);
     result_data : Result;
-    result_data.path = result.files[0].file.path;
+    result_data.path = result.file.path;
     result_text : string;

     if result.had_error {
         result_data.type = .Failed;
         result_text = report_messages(result.messages,, temp);
     } else {
-        result_text = pretty_print_ast(result.files[0].ast_root, *temp);
+        result_text = pretty_print_ast(result.root, *temp);
     }

     if output_type & .StdOut {
@@ -275,14 +275,14 @@ run_parser_test :: (result : *Compile_Result, output_type : Output_Type = 0) ->
         return result_data;
     }

-    golden_path := get_golden_path(result.files[0].file.path, .Parser);
+    golden_path := get_golden_path(result.file.path, .Parser);
     do_golden_comparison(golden_path, result_text, *result_data, output_type);
     return result_data;
 }

 run_semantic_analysis_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
     result_data : Result;
-    result_data.path = result.files[0].file.path;
+    result_data.path = result.file.path;
     result_text : string;

     check(result);
@@ -300,7 +300,7 @@ run_semantic_analysis_test :: (result : *Compile_Result, output_type : Output_Ty
         return result_data;
     }

-    golden_path := get_golden_path(result.files[0].file.path, .Semantic_Analysis);
+    golden_path := get_golden_path(result.file.path, .Semantic_Analysis);
     do_golden_comparison(golden_path, result_text, *result_data, output_type);
     return result_data;
 }
@@ -318,7 +318,7 @@ run_semantic_analysis_test :: (file_path : string, result : *Compile_Result, out
         return result_data;
     }

-    result_data = run_semantic_analysis_test(result, output_type);;
+    result_data = run_semantic_analysis_test(result, output_type);

     return result_data;
 }
@@ -334,6 +334,7 @@ make_test_case :: (path : string, stage_flags : Stage_Flags, allocator := contex

 run_test_new :: (file_path : string, stage_flags : Stage_Flags, results : *[..]Result, output_type : Output_Type = 0) {
     compile_result : Compile_Result;
+    add_file(*compile_result, file_path);
     result : Result;
     if stage_flags & .Lexer {
         result = run_lexer_test(file_path, *compile_result, output_type);
@@ -372,8 +373,8 @@ run_test_new :: (file_path : string, stage_flags : Stage_Flags, results : *[..]R
     }
 }

-run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_Type = 0, builder : *String_Builder) {
-    print_to_builder(builder, "%Running test: %......", cyan(), test_case.path);
+run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_Type = 0) {
+    print("%Running test: %......", cyan(), test_case.path);

     // path 30
     // len 35
@@ -387,7 +388,7 @@ run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_
     len := 50;
     rest := len - test_case.path.count;
     for i: 0..rest {
-        append(builder, " ");
+        print(" ");
     }

     run_test_new(test_case.path, test_case.stage_flags, results, output_type);
@@ -416,7 +417,7 @@ run_test_suite :: (using suite : *Test_Suite, output_type : Output_Type = 0) {
     init_string_builder(*builder,, temp);

     for test_case : test_cases {
-        run_test(test_case, *suite.results, output_type, *builder);
+        run_test(test_case, *suite.results, output_type);

         for < suite.results {
             result := suite.results[it_index];
@@ -432,7 +433,7 @@ run_test_suite :: (using suite : *Test_Suite, output_type : Output_Type = 0) {
                     array_add(*failed_test_paths, .{ result.path, tprint("golden file not found for %", stage_to_string(result.stage)) });
                 }
             }
-            evaluate_result(result, *builder);
+            evaluate_result(result);
             } else {
                 break;
             }
@@ -530,42 +531,42 @@ stage_to_string :: (stage : Stage_Flags) -> string {
     }
 }

-evaluate_result :: (result : Result, builder : *String_Builder) {
+evaluate_result :: (result : Result) {
     stage : string = stage_to_string(result.stage);

     if #complete result.type == {
         case .File_Read_Failed; {
-            print_to_builder(builder, " %", red());
-            print_to_builder(builder, "failed with File_Read_Failed\n");
+            print(" %", red());
+            print("failed with File_Read_Failed\n");
         }
         case .Golden_File_Not_Found; {
-            print_to_builder(builder, " %", red());
-            print_to_builder(builder, "failed with Golden File Not Found for stage %\n", stage);
+            print(" %", red());
+            print("failed with Golden File Not Found for stage %\n", stage);
         }
         case .StdOut; {
         }
        case .Golden_Output; {
-            print_to_builder(builder, " %", yellow());
-            print_to_builder(builder, "output new golden file at %\n", result.golden_path);
+            print(" %", yellow());
+            print("output new golden file at %\n", result.golden_path);
        }
        case .Passed; {
-            print_to_builder(builder, " %", green());
-            print_to_builder(builder, "passed %\n", stage);
+            print(" %", green());
+            print("passed %\n", stage);
        }
        case .Failed; {
-            print_to_builder(builder, " %", red());
-            print_to_builder(builder, "failed %\n", stage);
+            print(" %", red());
+            print("failed %\n", stage);
        }
    }

    if result.info_text.count > 0 {
-        print_to_builder(builder, "%", cyan());
-        print_to_builder(builder, "--- Info text ---\n");
-        print_to_builder(builder, "%", yellow());
-        print_to_builder(builder, "%\n", result.info_text);
+        print("%", cyan());
+        print("--- Info text ---\n");
+        print("%", yellow());
+        print("%\n", result.info_text);
    }

-    print_to_builder(builder, "%", reset_color());
+    print("%", reset_color());
 }

 main :: () {

module.jai
@@ -140,11 +140,17 @@ Token_Stream :: struct {
     tokens : [..]Token;
 }

-Compiled_File :: struct {
+// Compiled_File :: struct {
+//     allocator : Allocator;
+//     arena : Arena;
+// }
+
+Compile_Result :: struct {
     file : Input_File;
     tokens : Token_Stream;
-    ast_root : *AST_Node;
-    ast_nodes : [..]AST_Node;
+    root : *AST_Node;
+    nodes : [..]AST_Node;

     codegen_result_text : string;

@@ -172,12 +178,6 @@ Compiled_File :: struct {
     max_constant_buffers :: 16;
     cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);

     allocator : Allocator;
     arena : Arena;
-}
-
-Compile_Result :: struct {
-    files : [..]Compiled_File;
-
     had_error : bool;
     messages : [..]Compiler_Message;
@@ -203,17 +203,18 @@ add_file :: (result : *Compile_Result, path : string) {
         return;
     }

+    add_file_from_string(result, file_string, path);
+}
+
+add_file_from_string :: (result : *Compile_Result, source : string, path : string = "") {
     input_file : Input_File;

-    input_file.source = file_string;
+    input_file.source = source;
     input_file.path = path;

-    compiled_file : Compiled_File;
-    compiled_file.file = input_file;
+    result.file = input_file;

-    compiled_file.allocator = make_arena(*compiled_file.arena);
-
-    array_add(*result.files, compiled_file);
+    result.allocator = make_arena(*result.arena);
 }

 // @Incomplete(nb): Will we ever even use this?
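add_file is now a thin wrapper that reads the file and hands its contents to the new add_file_from_string, which is also what add_builtins uses to compile in-memory source. Direct use would look roughly like this sketch (the source string stands in for real shader code):

    source_text := "..."; // some shader source, elided here
    result : Compile_Result;
    add_file_from_string(*result, source_text); // path defaults to ""
    lex(*result);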
@@ -396,15 +397,18 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
 }

 type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variable) -> Field {
-    return type_variable_to_field(checker.result_file.type_variables, checker.result_file.scope_stack, variable);
+    return type_variable_to_field(checker.result.type_variables, checker.result.scope_stack, variable);
 }

 generate_output_data :: (result : *Compile_Result) {
-    for *file : result.files {
-        if file.vertex_entry_point.node {
-            file.vertex_entry_point.name = file.vertex_entry_point.node.name;
+    if result.had_error {
+        return;
+    }

-            type_variable := from_handle(file.type_variables, file.vertex_entry_point.node.type_variable);
+    if result.vertex_entry_point.node {
+        result.vertex_entry_point.name = result.vertex_entry_point.node.name;
+
+        type_variable := from_handle(result.type_variables, result.vertex_entry_point.node.type_variable);
         assert(type_variable.type == .Function);

         node := type_variable.source_node;
@@ -412,50 +416,51 @@ generate_output_data :: (result : *Compile_Result) {
         if node.children[0].kind == .FieldList {
             field_list := node.children[0];
             for child : field_list.children {
-                tv := from_handle(file.type_variables, child.type_variable);
-                field := type_variable_to_field(file.type_variables, file.scope_stack, tv);
-                array_add(*file.vertex_entry_point.input, field);
+                tv := from_handle(result.type_variables, child.type_variable);
+                field := type_variable_to_field(result.type_variables, result.scope_stack, tv);
+                array_add(*result.vertex_entry_point.input, field);
             }
         }
     }
-    }

-    for buffer_variable : file.constant_buffers {
-        variable := from_handle(file.type_variables, buffer_variable);
+    for buffer_variable : result.constant_buffers {
+        variable := from_handle(result.type_variables, buffer_variable);

-        cb := array_add(*file.cbuffers);
+        cb := array_add(*result.cbuffers);

         for i : 0..variable.children.count - 1 {
             child := variable.children[i];
             field : Property_Field;
-            field.base_field = type_variable_to_field(file.type_variables, file.scope_stack, from_handle(file.type_variables, child));
+            field.base_field = type_variable_to_field(result.type_variables, result.scope_stack, from_handle(result.type_variables, child));
             array_add(*cb.fields, field);
         }

         cb.buffer_index = variable.resource_index;
     }

-    find_result := find_symbol(*file.scope_stack, file.property_name, xx 1);
+    find_result := find_symbol(*result.scope_stack, result.property_name, xx 1);
     if find_result {
-        property_variable := from_handle(file.type_variables, find_result.type_variable);
+        property_variable := from_handle(result.type_variables, find_result.type_variable);

         for i : 0..property_variable.children.count - 1 {
             child := property_variable.children[i];
-            field := type_variable_to_field(file.type_variables, file.scope_stack, from_handle(file.type_variables, child));
+            field := type_variable_to_field(result.type_variables, result.scope_stack, from_handle(result.type_variables, child));
             prop_field : Property_Field;
             prop_field.base_field = field;
-            array_add(*file.properties.fields, prop_field);
+            array_add(*result.properties.fields, prop_field);
         }
-        file.properties.buffer_index = property_variable.resource_index;
+        result.properties.buffer_index = property_variable.resource_index;
     }

-    if file.pixel_entry_point.node {
-        file.pixel_entry_point.name = file.pixel_entry_point.node.name;
-
-        type_variable := from_handle(file.type_variables, file.pixel_entry_point.node.type_variable);
+    if result.pixel_entry_point.node {
+        result.pixel_entry_point.name = result.pixel_entry_point.node.name;
+
+        type_variable := from_handle(result.type_variables, result.pixel_entry_point.node.type_variable);
         assert(type_variable.type == .Function);

-        field := type_variable_to_field(file.type_variables, file.scope_stack, type_variable.return_type_variable);
+        field := type_variable_to_field(result.type_variables, result.scope_stack, type_variable.return_type_variable);
         for hint : type_variable.source_node.hint_tokens {
             field_hint : Field_Hint;

@@ -479,12 +484,25 @@ generate_output_data :: (result : *Compile_Result) {
             array_add(*field.hints, field_hint);
         }

-        file.pixel_entry_point.return_value = field;
-    }
+        result.pixel_entry_point.return_value = field;
     }
 }

-compile_file :: (compiler : *Shader_Compiler, paths : ..string) -> Compile_Result {
+compile_file :: (compiler : *Shader_Compiler, path : string) -> Compile_Result {
     result : Compile_Result;

+    add_file(*result, path);
+
+    lex(*result);
+    parse(*result);
+    check(*result);
+    codegen(*result);
+    generate_output_data(*result);
+
+    return result;
+}
+
+compile_files :: (compiler : *Shader_Compiler, paths : ..string) -> Compile_Result {
+    result : Compile_Result;
+
     for path : paths {

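The public entry points split here: compile_file takes a single path and runs the whole pipeline over one Compile_Result, while compile_files keeps the varargs form. Typical use, with a made-up path:

    compiler : Shader_Compiler;
    result := compile_file(*compiler, "shaders/basic.ink");

    if result.had_error {
        print("%\n", report_messages(result.messages));
    } else {
        print("%\n", result.codegen_result_text);
    }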
@@ -27,4 +27,5 @@
 {kind = TOKEN_IDENTIFIER; ; index = 103 ; length = 3 line = 5 ; column = 7 ; value ='pos'; }
 {kind = TOKEN_SEMICOLON; ; index = 106 ; length = 1 line = 5 ; column = 10 ; value =';'; }
 {kind = TOKEN_RIGHTBRACE; ; index = 109 ; length = 1 line = 6 ; column = 0 ; value ='}'; }
-{kind = TOKEN_EOF; ; index = 112 ; length = 0 line = 7 ; column = 0 ; value =''; }
+{kind = TOKEN_IDENTIFIER; ; index = 110 ; length = 1 line = 6 ; column = 1 ; value ='x'; }
+{kind = TOKEN_EOF; ; index = 113 ; length = 0 line = 7 ; column = 0 ; value =''; }