Change result to context for clarity. Fix a bunch of stuff in builtin functions and structs.
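
Compile_Result is now Compiler_Context, and the `result` fields/parameters that
carried it are now `ctx`, across the codegen, the Ink.jai test harness,
Lexing.jai, Parsing.jai, and the semantic checker.

Builtin fixes:
- make_builtin_token: set tok.column before emitting the text, and compute
  tok.index from where the text actually landed in the buffer.
- new_builtin_struct_node: set ident_value on the struct's name token, indent
  member fields, and store node.source_location instead of printing it.
- new_builtin_function_node: emit only the argument's type token, and emit ","
  (was ";") for the comma between arguments.
- new_builtin_function: drop the per-function scope push and argument symbols,
  set the argument's type/typename directly, and record the return type variable.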
@@ -26,7 +26,7 @@ Codegen_State :: struct {
 
 builder : String_Builder;
 
-result : *Compile_Result;
+result : *Compiler_Context;
 }
 
 // Codegen_Result :: struct {
@@ -56,7 +56,7 @@ Reserved_GLSL_Words :: string.[
 ""
 ];
 
-init_codegen_state :: (state : *Codegen_State, result : *Compile_Result, output_language : Output_Language) {
+init_codegen_state :: (state : *Codegen_State, result : *Compiler_Context, output_language : Output_Language) {
 state.current_scope = cast(Scope_Handle)1;
 state.output_language = output_language;
 init_string_builder(*state.builder);
@@ -627,11 +627,11 @@ emit_declaration :: (state : *Codegen_State, node : *AST_Node) {
 }
 }
 
-codegen :: (result : *Compile_Result) {
+codegen :: (result : *Compiler_Context) {
 codegen(result, .HLSL);
 }
 
-codegen :: (result : *Compile_Result, output_language : Output_Language, allocator := temp) {
+codegen :: (result : *Compiler_Context, output_language : Output_Language, allocator := temp) {
 if result.had_error {
 return;
 }

Ink.jai (240 changed lines)
@@ -127,215 +127,203 @@ get_golden_path :: (file_path : string, stage : Stage_Flags) -> string {
 return final_path;
 }
 
-do_golden_comparison :: (golden_path : string, comparison_text : string, result_data : *Result, output_type : Output_Type) {
+do_golden_comparison :: (golden_path : string, comparison_text : string, result : *Result, output_type : Output_Type) {
 sc := get_scratch();
 defer scratch_end(sc);
 if output_type & .Golden {
 // Output the comparison file
 write_entire_file(golden_path, comparison_text);
-result_data.golden_path = copy_string(golden_path);
+result.golden_path = copy_string(golden_path);
-result_data.type = .Golden_Output;
+result.type = .Golden_Output;
 return;
 } else {
 // Do the comparison
 if !file_exists(golden_path) {
-result_data.info_text = tprint("Golden file % does not exist. Please run with -output-as-golden at least once.\n", golden_path);
+result.info_text = tprint("Golden file % does not exist. Please run with -output-as-golden at least once.\n", golden_path);
-result_data.type = .Golden_File_Not_Found;
+result.type = .Golden_File_Not_Found;
 return;
 }
 
 golden_text, ok := read_entire_file(golden_path,, sc.allocator);
 if !ok {
-result_data.info_text = tprint("Unable to open golden file %\n", golden_path);
+result.info_text = tprint("Unable to open golden file %\n", golden_path);
-result_data.type = .Golden_File_Not_Found;
+result.type = .Golden_File_Not_Found;
 return;
 }
 
 comp := replace(comparison_text, "\r\n", "\n",, sc.allocator);
 gold := replace(golden_text, "\r\n", "\n",, sc.allocator);
-result := compare(comp, gold) == 0;
+ok = compare(comp, gold) == 0;
-if !result {
+if !ok {
-result_data.type = .Failed;
+result.type = .Failed;
-result_data.info_text = tprint("Golden file:\n%\n===============\n%", gold, comp);
+result.info_text = tprint("Golden file:\n%\n===============\n%", gold, comp);
 } else {
-result_data.type = .Passed;
+result.type = .Passed;
 }
 }
 }
 
-run_codegen_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_codegen_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-result.file = make_file(result, file_path);
+result : Result;
-result.allocator = make_arena(Megabytes(128));
+result.path = file_path;
 
-result_data : Result;
+lex(ctx);
-result_data.path = file_path;
+parse(ctx);
+check(ctx);
 
-lex(result);
+if ctx.had_error {
-parse(result);
+result.type = .Failed;
-check(result);
+return result;
 
-if result.had_error {
-result_data.type = .Failed;
-return result_data;
 }
-result_data = run_codegen_test(result, output_type);
+result = run_codegen_test(ctx, output_type);
 
-return result_data;
+return result;
 }
 
-run_codegen_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_codegen_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-result_data : Result;
+result : Result;
-result_data.path = result.file.path;
+result.path = ctx.file.path;
 result_text : string;
 
-codegen(result);
+codegen(ctx);
 
-if result.had_error {
+if ctx.had_error {
-result_data.type = .Failed;
+result.type = .Failed;
-result_text = report_messages(result.messages);
+result_text = report_messages(ctx.messages);
-return result_data;
+return result;
 }
 
-result_text = result.codegen_result_text;
+result_text = ctx.codegen_result_text;
 
 if output_type & .StdOut {
-result_data.info_text = result_text;
+result.info_text = result_text;
-result_data.type = .StdOut;
+result.type = .StdOut;
-return result_data;
+return result;
 }
 
-golden_path := get_golden_path(result.file.path, .Codegen);
+golden_path := get_golden_path(ctx.file.path, .Codegen);
-do_golden_comparison(golden_path, result_text, *result_data, output_type);
+do_golden_comparison(golden_path, result_text, *result, output_type);
-return result_data;
+return result;
 }
 
-run_compile_test :: (path : string, output_type : Output_Type = 0) -> Result, Compile_Result {
+run_compile_test :: (path : string, output_type : Output_Type = 0) -> Result, Compiler_Context {
-compiler : Shader_Compiler;
+compiler : Compiler_Context;
 result : Result;
-compilation_result := compile_file(*compiler, path);
+compile_file(*compiler, path);
 print("\n");
 
-if compilation_result.had_error {
+if compiler.had_error {
 result.type = .Failed;
 result.info_text = tprint("Failed compiling: %\n", path);
 }
 
-return result, compilation_result;
+return result, compiler;
 }
 
-run_lexer_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_lexer_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-result_data : Result;
+result : Result;
-result_data.path = file_path;
+result.path = file_path;
-result_data.stage = .Lexer;
+result.stage = .Lexer;
 
 result_text : string;
 
-result.file = make_file(result, file_path);
-result.allocator = make_arena(Megabytes(128));
 
-lex(result);
+lex(ctx);
-if result.had_error {
+if ctx.had_error {
-result_data.type = .Failed;
+result.type = .Failed;
-result_text = report_messages(result.messages);
+result_text = report_messages(ctx.messages);
 } else {
-result_text = pretty_print_tokens(result.tokens, *temp);
+result_text = pretty_print_tokens(ctx.tokens, *temp);
 }
 
 if output_type & .StdOut {
-result_data.info_text = result_text;
+result.info_text = result_text;
-result_data.type = .StdOut;
+result.type = .StdOut;
-return result_data;
+return result;
 }
 
 golden_path := get_golden_path(file_path, .Lexer);
-do_golden_comparison(golden_path, result_text, *result_data, output_type);
+do_golden_comparison(golden_path, result_text, *result, output_type);
-return result_data;
+return result;
 }
 
-run_parser_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_parser_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-result_data : Result;
+result : Result;
-result_data.path = file_path;
+result.path = file_path;
 
-result.file = make_file(result, file_path);
+lex(ctx);
-result.allocator = make_arena(Megabytes(128));
+if ctx.had_error {
+result.type = .Passed;
-lex(result);
+return result;
-if result.had_error {
-result_data.type = .Passed;
-return result_data;
 }
 
-result_data = run_parser_test(result, output_type);
+result = run_parser_test(ctx, output_type);
 
-return result_data;
+return result;
 }
 
-run_parser_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_parser_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-parse(result);
+parse(ctx);
-result_data : Result;
+result : Result;
-result_data.path = result.file.path;
+result.path = ctx.file.path;
 result_text : string;
 
-if result.had_error {
+if ctx.had_error {
-result_data.type = .Failed;
+result.type = .Failed;
-result_text = report_messages(result.messages,, temp);
+result_text = report_messages(ctx.messages,, temp);
 } else {
-result_text = pretty_print_ast(result.root, *temp);
+result_text = pretty_print_ast(ctx.root, *temp);
 }
 
 if output_type & .StdOut {
-result_data.info_text = result_text;
+result.info_text = result_text;
-result_data.type = .StdOut;
+result.type = .StdOut;
-return result_data;
+return result;
 }
 
-golden_path := get_golden_path(result.file.path, .Parser);
+golden_path := get_golden_path(ctx.file.path, .Parser);
-do_golden_comparison(golden_path, result_text, *result_data, output_type);
+do_golden_comparison(golden_path, result_text, *result, output_type);
-return result_data;
+return result;
 }
 
-run_semantic_analysis_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_semantic_analysis_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-result_data : Result;
+result : Result;
-result_data.path = result.file.path;
+result.path = ctx.file.path;
 result_text : string;
 
-check(result);
+check(ctx);
 
-if result.had_error {
+if ctx.had_error {
-result_data.type = .Failed;
+result.type = .Failed;
-result_text = report_messages(result.messages);
+result_text = report_messages(ctx.messages);
 } else {
-result_text = pretty_print_symbol_table(result, temp);
+result_text = pretty_print_symbol_table(ctx, temp);
 }
 
 if output_type & .StdOut {
-result_data.info_text = result_text;
+result.info_text = result_text;
-result_data.type = .StdOut;
+result.type = .StdOut;
-return result_data;
+return result;
 }
 
-golden_path := get_golden_path(result.file.path, .Semantic_Analysis);
+golden_path := get_golden_path(ctx.file.path, .Semantic_Analysis);
-do_golden_comparison(golden_path, result_text, *result_data, output_type);
+do_golden_comparison(golden_path, result_text, *result, output_type);
-return result_data;
+return result;
 }
 
-run_semantic_analysis_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
+run_semantic_analysis_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
-result.file = make_file(result, file_path,, result.allocator);
+result : Result;
-result.allocator = make_arena(Megabytes(128));
+result.path = file_path;
 
-result_data : Result;
+lex(ctx);
-result_data.path = file_path;
+parse(ctx);
+if ctx.had_error {
-lex(result);
+result.type = .Failed;
-parse(result);
+return result;
-if result.had_error {
-result_data.type = .Failed;
-return result_data;
 }
 
-result_data = run_semantic_analysis_test(result, output_type);
+result = run_semantic_analysis_test(ctx, output_type);
 
-return result_data;
+return result;
 }
 
 make_test_case :: (path : string, stage_flags : Stage_Flags, allocator := context.allocator) -> Test_Case {
@@ -348,40 +336,40 @@ make_test_case :: (path : string, stage_flags : Stage_Flags, allocator := contex
 }
 
 run_test_new :: (file_path : string, stage_flags : Stage_Flags, results : *[..]Result, output_type : Output_Type = 0) {
-compile_result : Compile_Result;
+ctx : Compiler_Context;
 
-compile_result.allocator = make_arena(Megabytes(128));
+ctx.allocator = make_arena(Megabytes(128));
-compile_result.file = make_file(*compile_result, file_path,, compile_result.allocator);
+ctx.file = make_file(*ctx, file_path,, ctx.allocator);
 
 result : Result;
 if stage_flags & .Lexer {
-result = run_lexer_test(file_path, *compile_result, output_type);
+result = run_lexer_test(file_path, *ctx, output_type);
 record_result(results, result);
 }
 
 if stage_flags & .Parser {
 if stage_flags & .Lexer && result.type == .Passed || result.type == .Golden_Output {
-result = run_parser_test(*compile_result, output_type);
+result = run_parser_test(*ctx, output_type);
 } else {
-result = run_parser_test(file_path, *compile_result, output_type);
+result = run_parser_test(file_path, *ctx, output_type);
 }
 record_result(results, result);
 }
 
 if stage_flags & .Semantic_Analysis {
 if stage_flags & .Parser && (result.type == .Passed || result.type == .Golden_Output) {
-result = run_semantic_analysis_test(*compile_result, output_type);
+result = run_semantic_analysis_test(*ctx, output_type);
 } else {
-result = run_semantic_analysis_test(file_path, *compile_result, output_type);
+result = run_semantic_analysis_test(file_path, *ctx, output_type);
 }
 record_result(results, result);
 }
 
 if stage_flags & .Codegen {
 if stage_flags & .Semantic_Analysis && (result.type == .Passed || result.type == .Golden_Output) {
-result = run_codegen_test(*compile_result, output_type);
+result = run_codegen_test(*ctx, output_type);
 } else {
-result = run_codegen_test(file_path, *compile_result, output_type);
+result = run_codegen_test(file_path, *ctx, output_type);
 }
 record_result(results, result);
 }

Lexing.jai (52 changed lines)
@@ -5,7 +5,7 @@ Lexer :: struct {
 current_line : int;
 current_column : int;
 
-result : *Compile_Result;
+ctx : *Compiler_Context;
 
 path : string;
 }
@@ -262,7 +262,7 @@ identifier_kind :: (using lexer : *Lexer) -> Token_Kind {
 error_token :: (lexer : *Lexer, message : string) -> *Token {
 token : *Token = new_token(lexer, .TOKEN_ERROR);
 
-lexer.result.had_error = true;
+lexer.ctx.had_error = true;
 token.error = copy_string(message);
 
 return token;
@@ -309,8 +309,8 @@ record_error :: (lexer : *Lexer, message : string) {
 
 array_add(*error.source_locations, source_location);
 
-lexer.result.had_error = true;
+lexer.ctx.had_error = true;
-array_add(*lexer.result.messages, error);
+array_add(*lexer.ctx.messages, error);
 }
 
 make_int :: (lexer : *Lexer) -> *Token {
@@ -356,8 +356,8 @@ new_token :: (lexer : *Lexer, kind : Token_Kind) -> *Token {
 }
 lexer.current_column += length;
 
-array_add(*lexer.result.tokens, token);
+array_add(*lexer.ctx.tokens, token);
-return *lexer.result.tokens[lexer.result.tokens.count - 1];
+return *lexer.ctx.tokens[lexer.ctx.tokens.count - 1];
 }
 
 make_directive :: (lexer : *Lexer) -> *Token {
@@ -366,26 +366,26 @@ make_directive :: (lexer : *Lexer) -> *Token {
 if ident.ident_value == "load" {
 path_tok := scan_next_token(lexer);
 path := path_tok.string_value;
-result : Compile_Result;
+ctx : Compiler_Context;
-result.allocator = lexer.result.allocator;
+ctx.allocator = lexer.ctx.allocator;
-result.environment = lexer.result.environment;
+ctx.environment = lexer.ctx.environment;
 
-result.file = make_file(*result, path);
+ctx.file = make_file(*ctx, path);
 
-if result.file.source.count == 0 {
+if ctx.file.source.count == 0 {
 // unable_to_open_file(lexer, path, path_tok);
 record_error(lexer, tprint("Unable to open file '%' for reading\n", path));
 return error_token(lexer, tprint("Unable to open file '%' for reading\n", path));
 }
 
-lex(*result);
+lex(*ctx);
 
-result.tokens.count -= 1; // @Note: remove TOKEN_EOF
+ctx.tokens.count -= 1; // @Note: remove TOKEN_EOF
-lexer.result.tokens.count -= 2;
+lexer.ctx.tokens.count -= 2;
-array_resize(*lexer.result.tokens, lexer.result.tokens.count + result.tokens.count);
+array_resize(*lexer.ctx.tokens, lexer.ctx.tokens.count + ctx.tokens.count);
 
-for tok : result.tokens {
+for tok : ctx.tokens {
-lexer.result.tokens[it_index] = tok;
+lexer.ctx.tokens[it_index] = tok;
 }
 return scan_next_token(lexer);
 }
@@ -565,8 +565,8 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
 // return error_token(lexer, tprint("Invalid token: %", s));
 }
 
-lex :: (result : *Compile_Result, allocator := temp) {
+lex :: (ctx : *Compiler_Context, allocator := temp) {
-if result.had_error {
+if ctx.had_error {
 return;
 }
 
@@ -577,12 +577,12 @@ lex :: (result : *Compile_Result, allocator := temp) {
 defer clear_context_allocators();
 
 lexer : Lexer;
-lexer.result = result;
+lexer.ctx = ctx;
-lexer.result.tokens.allocator = result.allocator;
+lexer.ctx.tokens.allocator = ctx.allocator;
-array_reserve(*lexer.result.tokens, 1024 * 1024);
+array_reserve(*lexer.ctx.tokens, 1024 * 1024);
 
-init_lexer_from_string(*lexer, result.file.source);
+init_lexer_from_string(*lexer, ctx.file.source);
-lexer.path = result.file.path;
+lexer.path = ctx.file.path;
 token : *Token = scan_next_token(*lexer);
 while token && token.kind != .TOKEN_EOF {
 token = scan_next_token(*lexer);
@@ -594,7 +594,7 @@ init_lexer_from_string :: (lexer : *Lexer, input : string) {
 ok := read_input_from_string(lexer, input);
 if !ok {
 record_error(lexer, "Unable to initialize from string\n");
-lexer.result.had_error = true;
+lexer.ctx.had_error = true;
 }
 }
 
@@ -602,7 +602,7 @@ init_lexer_from_file :: (lexer : *Lexer, file_path : string) {
 ok := read_input_from_file(lexer, file_path);
 if !ok {
 record_error(lexer, tprint("Unable to read file: %\n", file_path));
-lexer.result.had_error = true;
+lexer.ctx.had_error = true;
 }
 }
 

Parsing.jai (154 changed lines)
@@ -16,7 +16,7 @@ Parse_State :: struct {
 
 current_token_index : int;
 
-result : *Compile_Result;
+ctx : *Compiler_Context;
 }
 
 ////////////////////////////
@@ -119,7 +119,7 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
 error : Compiler_Message;
 error.message_kind = .Error;
 error.message = message;
-error.path = parse_state.result.file.path;
+error.path = parse_state.ctx.file.path;
 
 source_location : Source_Range;
 source_location.begin = token;
@@ -134,8 +134,8 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
 source_location.end = parse_state.current;
 array_add(*error.source_locations, source_location);
 
-parse_state.result.had_error = true;
+parse_state.ctx.had_error = true;
-array_add(*parse_state.result.messages, error);
+array_add(*parse_state.ctx.messages, error);
 
 rewind_to_snapshot(parse_state, snap);
 }
@@ -382,7 +382,7 @@ make_node :: (nodes : *[..]AST_Node, kind : AST_Kind, allocator : Allocator) ->
 }
 
 make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
-return make_node(*parse_state.result.nodes, kind, parse_state.result.allocator);
+return make_node(*parse_state.ctx.nodes, kind, parse_state.ctx.allocator);
 }
 
 // new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
@@ -401,7 +401,8 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
 
 if buffer {
 start := buffer.count;
 }
+tok.column = col.*;
 
 print_to_builder(builder, "%", text);
 buffer = get_current_buffer(builder);
@@ -416,8 +417,7 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
 }
 }
 
-tok.column = col.*;
+tok.index = buffer.count - text.count;
-tok.index = buffer.count;
 tok.length = text.count;
 tok.builtin = true;
 
@@ -426,51 +426,53 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
 return *(tokens.*)[tokens.count - 1];
 }
 
-new_builtin_struct_node :: (result : *Compile_Result, name : string, members : []Arg, allocator : Allocator) -> *AST_Node {
+new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg, allocator : Allocator) -> *AST_Node {
 sc := get_scratch(allocator);
 defer scratch_end(sc);
 builder : String_Builder;
 builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
 
-node := make_node(*result.nodes, .Struct, allocator);
+node := make_node(*ctx.nodes, .Struct, allocator);
 
 source_location : Source_Range;
 
 col := 0;
 line := 0;
 
-tok_index := result.tokens.count;
+tok_index := ctx.tokens.count;
 
-ident_token := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
+ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
+ident_token.ident_value = name;
 source_location.begin = ident_token;
 
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_STRUCT, "struct", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_STRUCT, "struct", *col, *line);
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_LEFTBRACE, "{", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTBRACE, "{", *col, *line);
 append(*builder, "\n");
 line += 1;
 col = 0;
 
-field_list := make_node(*result.nodes, .FieldList, allocator);
+field_list := make_node(*ctx.nodes, .FieldList, allocator);
 add_child(node, field_list);
 
 for member : members {
-field := make_node(*result.nodes, .Field, allocator);
+field := make_node(*ctx.nodes, .Field, allocator);
 field_source_loc : Source_Range;
 
-field_ident := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
+indent(*builder, 1);
+field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
 field_source_loc.begin = field_ident;
 field.token = field_ident;
 field.name = member.name;
 
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
-semicolon_tok := make_builtin_token(*result.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
+semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
 append(*builder, "\n");
 col = 0;
 line += 1;
@@ -481,7 +483,7 @@ new_builtin_struct_node :: (result : *Compile_Result, name : string, members : [
 add_child(field_list, field);
 }
 
-brace_token := make_builtin_token(*result.tokens, *builder, .TOKEN_RIGHTBRACE, "}", *col, *line);
+brace_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTBRACE, "}", *col, *line);
 append(*builder, "\n");
 
 source_location.end = brace_token;
@@ -491,8 +493,8 @@ new_builtin_struct_node :: (result : *Compile_Result, name : string, members : [
 source_location.begin.source = *source.data[source_location.begin.column];
 source_location.end.source = *source.data[source_location.end.column];
 
-for i : tok_index..result.tokens.count - 1 {
+for i : tok_index..ctx.tokens.count - 1 {
-tok := result.tokens[i];
+tok := ctx.tokens[i];
 tok.source = *source.data[tok.column];
 }
 
@@ -502,52 +504,48 @@ new_builtin_struct_node :: (result : *Compile_Result, name : string, members : [
 // field.source_location.main_token.source = *source.data[tok.column];
 }
 
-print_from_source_location(source_location, temp);
+node.source_location = source_location;
 
 return node;
 }
 
-new_builtin_function_node :: (result : *Compile_Result, name : string, members : []Arg, return_var : Arg, allocator : Allocator) -> *AST_Node {
+new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg, allocator : Allocator) -> *AST_Node {
 sc := get_scratch(allocator);
 defer scratch_end(sc);
 builder : String_Builder;
 builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
 
-node := make_node(*result.nodes, .Function, allocator);
+node := make_node(*ctx.nodes, .Function, allocator);
 
 source_location : Source_Range;
 
 col := 0;
 line := 0;
 
-tok_index := result.tokens.count;
+tok_index := ctx.tokens.count;
 
-ident_token := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
+ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
 source_location.begin = ident_token;
 
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
 append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_LEFTPAREN, "(", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTPAREN, "(", *col, *line);
-field_list := make_node(*result.nodes, .FieldList, allocator);
+field_list := make_node(*ctx.nodes, .FieldList, allocator);
 add_child(node, field_list);
 
 for member : members {
-field := make_node(*result.nodes, .Field, allocator);
+field := make_node(*ctx.nodes, .Field, allocator);
 field_source_loc : Source_Range;
 
-field_ident := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
+// field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
-field_source_loc.begin = field_ident;
+type_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
-field.token = field_ident;
+field_source_loc.begin = type_tok;
-field.name = member.name;
+field.token = type_tok;
 
-append(*builder, " ");
-make_builtin_token(*result.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
-append(*builder, " ");
-type_tok := make_builtin_token(*result.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
 
 if it_index < members.count - 1 {
-make_builtin_token(*result.tokens, *builder, .TOKEN_COMMA, ";", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_COMMA, ",", *col, *line);
+append(*builder, " ");
 }
 
 field_source_loc.end = type_tok;
@@ -556,8 +554,8 @@ new_builtin_function_node :: (result : *Compile_Result, name : string, members :
 add_child(field_list, field);
 }
 
-make_builtin_token(*result.tokens, *builder, .TOKEN_RIGHTPAREN, ")", *col, *line);
+make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTPAREN, ")", *col, *line);
-semicolon_tok := make_builtin_token(*result.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
+semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
 
 source_location.end = semicolon_tok;
 
@@ -566,8 +564,8 @@ new_builtin_function_node :: (result : *Compile_Result, name : string, members :
 source_location.begin.source = *source.data[source_location.begin.column];
 source_location.end.source = *source.data[source_location.end.column];
 
-for i : tok_index..result.tokens.count - 1 {
+for i : tok_index..ctx.tokens.count - 1 {
-tok := result.tokens[i];
+tok := ctx.tokens[i];
 tok.source = *source.data[tok.column];
 }
 
@@ -577,7 +575,7 @@ new_builtin_function_node :: (result : *Compile_Result, name : string, members :
 // field.source_location.main_token.source = *source.data[tok.column];
 }
 
-print_from_source_location(source_location, temp);
+node.source_location = source_location;
 
 return node;
 }
@@ -617,10 +615,10 @@ advance :: (parse_state : *Parse_State) {
 parse_state.previous = parse_state.current;
 
 while true {
-if parse_state.current_token_index >= parse_state.result.tokens.count {
+if parse_state.current_token_index >= parse_state.ctx.tokens.count {
 break;
 }
-parse_state.current = *parse_state.result.tokens[parse_state.current_token_index];
+parse_state.current = *parse_state.ctx.tokens[parse_state.current_token_index];
 parse_state.current_token_index += 1;
 if parse_state.current.kind != .TOKEN_ERROR break;
 
@@ -653,7 +651,7 @@ check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {
 
 //nb - Checks if the next token is of a certain kind
 check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
-return parse_state.result.tokens[parse_state.current_token_index].kind == kind;
+return parse_state.ctx.tokens[parse_state.current_token_index].kind == kind;
 }
 
 //nb - Consume a token if
@@ -772,8 +770,8 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
 }
 
 array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
-identifier := parse_state.result.tokens[parse_state.current_token_index - 3];
+identifier := parse_state.ctx.tokens[parse_state.current_token_index - 3];
-left_bracket := parse_state.result.tokens[parse_state.current_token_index - 2];
+left_bracket := parse_state.ctx.tokens[parse_state.current_token_index - 2];
 
 array_access := make_node(parse_state, .Unary);
 array_access.token = left_bracket;
@@ -864,13 +862,13 @@ directive :: (state : *Parse_State) -> *AST_Node {
 
 // advance(state);
 
-// result : Compile_Result;
+// result : Compiler_Context;
-// result.allocator = state.result.allocator;
+// ctx.allocator = state.ctx.allocator;
-// result.environment = state.result.environment;
+// ctx.environment = state.ctx.environment;
 
-// result.file = make_file(*result, path);
+// ctx.file = make_file(*result, path);
 
-// if result.file.source.count == 0 {
+// if ctx.file.source.count == 0 {
 // unable_to_open_file(state, path, path_tok);
 // advance_to_sync_point(state);
 // advance(state);
@@ -881,21 +879,21 @@ directive :: (state : *Parse_State) -> *AST_Node {
 
 // lex(*result);
 
-// count := state.result.tokens..count;
+// count := state.ctx.tokens..count;
 // current_idx := state.current_token_index;
-// result_count := result.tokens..count;
+// result_count := ctx.tokens..count;
 
-// // state.result.tokens..count -= 1;
+// // state.ctx.tokens..count -= 1;
-// array_resize(*state.result.tokens., count + result_count - 1);
+// array_resize(*state.ctx.tokens., count + result_count - 1);
 
-// memcpy(*state.result.tokens[current_idx + result_count - 1], *state.result.tokens[current_idx], size_of(Token) * (count - current_idx));
+// memcpy(*state.ctx.tokens[current_idx + result_count - 1], *state.ctx.tokens[current_idx], size_of(Token) * (count - current_idx));
 
-// for *tok : result.tokens. {
+// for *tok : ctx.tokens. {
 // if tok.kind == .TOKEN_EOF {
 // break;
 // }
 // tok.builtin = true;
-// state.result.tokens[it_index] = tok.*;
+// state.ctx.tokens[it_index] = tok.*;
 // }
 }
 }
@@ -1041,7 +1039,7 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
 node.array_field = true;
 } else {
 if !check(parse_state, .TOKEN_ASSIGN) {
-internal_error_message(*parse_state.result.messages, "Unimplemented error message.", parse_state.result.file.path);
+internal_error_message(*parse_state.ctx.messages, "Unimplemented error message.", parse_state.ctx.file.path);
 return node;
 }
 // missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");
@@ -1085,8 +1083,8 @@ argument_list :: (parse_state : *Parse_State) -> *AST_Node {
 
 source_location.main_token = parse_state.current;
 
-error_before := parse_state.result.had_error;
+error_before := parse_state.ctx.had_error;
-parse_state.result.had_error = false;
+parse_state.ctx.had_error = false;
 
 while !check(parse_state, .TOKEN_RIGHTPAREN) {
 arg := expression(parse_state);
@@ -1099,12 +1097,12 @@ argument_list :: (parse_state : *Parse_State) -> *AST_Node {
 if check(parse_state, .TOKEN_RIGHTPAREN) break;
 consume(parse_state, .TOKEN_COMMA, "Expect ',' after function argument.");
 
-if parse_state.result.had_error {
+if parse_state.ctx.had_error {
 break;
 }
 }
 
-parse_state.result.had_error = error_before || parse_state.result.had_error;
+parse_state.ctx.had_error = error_before || parse_state.ctx.had_error;
 
 consume(parse_state, .TOKEN_RIGHTPAREN, "Expect ')' after function call.");
 
@@ -1581,8 +1579,8 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
 return decl_node;
 }
 
-parse :: (result : *Compile_Result, allocator := temp) {
+parse :: (ctx : *Compiler_Context, allocator := temp) {
-if result.had_error {
+if ctx.had_error {
 return;
 }
 
@@ -1593,17 +1591,17 @@ parse :: (result : *Compile_Result, allocator := temp) {
 defer clear_context_allocators();
 
 parse_state : Parse_State;
-result.nodes.allocator = result.allocator;
+ctx.nodes.allocator = ctx.allocator;
-array_reserve(*result.nodes, 4096);
+array_reserve(*ctx.nodes, 4096);
 parse_state.current_token_index = 0;
-parse_state.result = result;
+parse_state.ctx = ctx;
 
 advance(*parse_state);
 
 if !match(*parse_state, .TOKEN_EOF) {
-parse_state.result.root = make_node(*parse_state, .Program);
+parse_state.ctx.root = make_node(*parse_state, .Program);
-array_reserve(*parse_state.result.root.children, 1024);
+array_reserve(*parse_state.ctx.root.children, 1024);
-program := parse_state.result.root;
+program := parse_state.ctx.root;
 
 while !check(*parse_state, .TOKEN_EOF) {
 decl := declaration(*parse_state);
@@ -143,7 +143,7 @@ Semantic_Checker :: struct {
 
 current_scope : Scope_Handle;
 
-result : *Compile_Result;
+ctx : *Compiler_Context;
 
 current_buffer_index : u32 = 0;
 current_sampler_index : u32 = 0;
@@ -451,7 +451,7 @@ Attempting to access a field on a primitive type '%'.
 init_string_builder(*builder,, temp);
 
 variable := from_handle(checker, handle);
-print_to_builder(*builder, "Attempting to access a field on a primitive type '%'.\n", proper_type_to_string(checker.result.type_variables, variable));
+print_to_builder(*builder, "Attempting to access a field on a primitive type '%'.\n", proper_type_to_string(checker.ctx.type_variables, variable));
 
 indent(*builder, 1);
 cyan(*builder);
@@ -512,7 +512,7 @@ if_condition_has_to_be_boolean_type :: (checker : *Semantic_Checker, usage_site
 usage_child := usage_site.children[0];
 usage_loc := usage_child.source_location;
 
-print_to_builder(*builder, "% has type %\n", print_from_source_location(*usage_loc), proper_type_to_string(checker.result.type_variables, var));
+print_to_builder(*builder, "% has type %\n", print_from_source_location(*usage_loc), proper_type_to_string(checker.ctx.type_variables, var));
 
 message := builder_to_string(*builder,, temp);
 record_error(checker, message, usage_site.source_location, false);
@@ -554,7 +554,7 @@ type_mismatch :: (checker : *Semantic_Checker, usage_site : *AST_Node, expect_no
 indent(*builder, 1);
 print_to_builder(*builder, "expected:\n");
 indent(*builder, 2);
-proper_type_to_string(*builder, checker.result.type_variables, expect_var);
+proper_type_to_string(*builder, checker.ctx.type_variables, expect_var);
 append(*builder, "\n");
 
 // indent(*builder, 2);
@@ -592,7 +592,7 @@ record_error :: (checker : *Semantic_Checker, error_string : string, locations :
 error.message = error_string;
 
 checker.had_error = true;
-array_add(*checker.result.messages, error);
+array_add(*checker.ctx.messages, error);
 }
 
 is_proper :: (var : Type_Variable) -> bool {
@@ -617,10 +617,10 @@ use_scope :: (checker : *Semantic_Checker, handle : Scope_Handle) -> Scope_Handl
 
 push_scope :: (checker : *Semantic_Checker, name := "", kind : Scope_Kind = .Global) -> *Scope, Scope_Handle {
 new_scope : Scope;
-array_add(*checker.result.scope_stack.stack, new_scope);
+array_add(*checker.ctx.scope_stack.stack, new_scope);
 
-count := checker.result.scope_stack.stack.count;
+count := checker.ctx.scope_stack.stack.count;
-scope := *checker.result.scope_stack.stack[count - 1];
+scope := *checker.ctx.scope_stack.stack[count - 1];
 scope.allocator = make_arena(Kilobytes(512));
 scope.table.allocator = scope.allocator;
 scope.parent = checker.current_scope;
@@ -630,7 +630,7 @@ push_scope :: (checker : *Semantic_Checker, name := "", kind : Scope_Kind = .Glo
 scope.builtin = true;
 }
 
-scope.children.allocator = checker.result.scope_stack.stack.allocator;
+scope.children.allocator = checker.ctx.scope_stack.stack.allocator;
 
 if checker.current_scope {
 scope := get_current_scope(checker);
@@ -653,12 +653,12 @@ pop_scope :: (checker : *Semantic_Checker) -> Scope_Handle {
 }
 
 peek_scope :: (checker : *Semantic_Checker) -> *Scope, Scope_Handle {
-if checker.result.scope_stack.stack.count == 0 {
+if checker.ctx.scope_stack.stack.count == 0 {
 return null, 0;
 }
 
-count := checker.result.scope_stack.stack.count;
+count := checker.ctx.scope_stack.stack.count;
-scope := *checker.result.scope_stack.stack[count - 1];
+scope := *checker.ctx.scope_stack.stack[count - 1];
 return scope, xx count;
 }
 
@@ -675,7 +675,7 @@ get_scope :: (scope_stack : Scope_Stack, handle : Scope_Handle) -> *Scope {
 }
 
 get_scope :: (checker : *Semantic_Checker, handle : Scope_Handle) -> *Scope {
-return get_scope(*checker.result.scope_stack, handle);
+return get_scope(*checker.ctx.scope_stack, handle);
 }
 
 add_symbol_to_scope :: (state : Checker_State, scope_stack : *Scope_Stack, scope_handle : Scope_Handle, name : string, symbol : Defined_Symbol) -> *Defined_Symbol {
@@ -697,8 +697,8 @@ add_symbol_to_scope :: (state : Checker_State, scope_stack : *Scope_Stack, scope
 
 new_type_variable :: (checker : *Semantic_Checker) -> *Type_Variable, Type_Variable_Handle {
 variable : Type_Variable;
-handle := cast(Type_Variable_Handle)checker.result.type_variables.count + 1;
+handle := cast(Type_Variable_Handle)checker.ctx.type_variables.count + 1;
-array_add(*checker.result.type_variables, variable);
+array_add(*checker.ctx.type_variables, variable);
 
 return from_handle(checker, handle), handle;
 }
@@ -723,14 +723,14 @@ Arg :: struct {
 new_builtin_struct :: (checker : *Semantic_Checker, name : string, members : []Arg) -> *Type_Variable, Type_Variable_Handle {
 tv, handle := new_builtin_type_variable(checker, .Struct, .Declaration, name, name);
 
-builtin_node := new_builtin_struct_node(checker.result, name, members, checker.result.allocator);
+builtin_node := new_builtin_struct_node(checker.ctx, name, members, checker.ctx.allocator);
 
 symbol : Defined_Symbol;
 symbol.name = name;
 symbol.source_node = builtin_node;
 symbol.builtin = true;
 symbol.type_variable = handle;
-add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name, symbol);
+add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, name, symbol);
 
 tv.source_node = builtin_node;
 
@@ -749,7 +749,7 @@ new_builtin_struct :: (checker : *Semantic_Checker, name : string, members : []A
 member_symbol : Defined_Symbol;
 member_symbol.name = member.name;
 member_symbol.type_variable = member_handle;
-add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, member.name, member_symbol);
+add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, member.name, member_symbol);
 
 field_list.children[it_index].type_variable = member_handle;
 member_var.source_node = field_list.children[it_index];
@@ -765,7 +765,7 @@ new_builtin_struct :: (checker : *Semantic_Checker, name : string, members : []A
 new_builtin_function :: (checker : *Semantic_Checker, name : string, args : []Arg, return_arg : Arg) -> *Type_Variable, Type_Variable_Handle {
 tv, handle := new_builtin_type_variable(checker, .Function, .Declaration, name);
 
-builtin_node := new_builtin_function_node(checker.result, name, args, return_arg, checker.result.allocator);
+builtin_node := new_builtin_function_node(checker.ctx, name, args, return_arg, checker.ctx.allocator);
 
 function : Defined_Symbol;
 function.name = name;
@@ -782,7 +782,7 @@ new_builtin_function :: (checker : *Semantic_Checker, name : string, args : []Ar
 symbol.type_variable = 0;
 symbol.functions.allocator = get_current_scope(checker).allocator;
 array_add(*symbol.functions, function);
-add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name, symbol);
+add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, name, symbol);
 } else {
 array_add(*find_result.functions, function);
 }
@@ -791,20 +791,14 @@ new_builtin_function :: (checker : *Semantic_Checker, name : string, args : []Ar
 
     field_list := get_field_list(builtin_node);
 
-    scope, scope_handle := push_scope(checker, name, .Struct);
-    tv.scope = scope_handle;
-
     for arg : args {
         typename : string;
         kind := lookup_type(checker, checker.current_scope, arg.typename, *typename);
 
         arg_var, arg_handle := new_builtin_type_variable(checker, kind, .Expression, arg.name);
         arg_var.scope = tv.scope;
-
-        arg_symbol : Defined_Symbol;
-        arg_symbol.name = arg.name;
-        arg_symbol.type_variable = arg_handle;
-        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, arg.name, arg_symbol);
+        arg_var.type = kind;
+        arg_var.typename = typename;
 
         field_list.children[it_index].type_variable = arg_handle;
         arg_var.source_node = field_list.children[it_index];
@@ -812,7 +806,11 @@ new_builtin_function :: (checker : *Semantic_Checker, name : string, args : []Ar
         add_child(checker, handle, arg_handle);
     }
 
-    pop_scope(checker);
+    if return_arg.typename.count > 0 {
+        return_var, return_handle := new_type_variable(checker);
+        return_var.type = lookup_type(checker, checker.current_scope, return_arg.typename, *return_var.typename);
+        from_handle(checker, handle).return_type_variable = return_handle;
+    }
 
     return from_handle(checker, handle), handle;
 }
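
Two behavioral changes land here alongside the rename: the push_scope/pop_scope pair around builtin arguments is gone (arguments now carry their type and typename directly on the type variable instead of going through per-argument symbols), and a return type variable is only created when return_arg actually names a type. An Arg with an empty typename therefore means "no return value" (a sketch; both function names are illustrative):

    // Non-empty return typename: return_type_variable gets wired up.
    new_builtin_function(checker, "normalize3", .[targ("float3")], targ("float3"));

    // Default Arg literal has an empty typename, so the
    // return_arg.typename.count > 0 branch is skipped entirely.
    new_builtin_function(checker, "debug_marker", .[farg()], .{});
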
@@ -838,12 +836,12 @@ init_semantic_checker :: (checker : *Semantic_Checker, root : *AST_Node, path :
     checker.program_root = root;
     checker.path = path;
 
-    checker.result.type_variables.allocator = checker.result.allocator;
-    array_reserve(*checker.result.type_variables, 2048);
+    checker.ctx.type_variables.allocator = checker.ctx.allocator;
+    array_reserve(*checker.ctx.type_variables, 2048);
 
-    checker.result.scope_stack.allocator = make_arena(Megabytes(8));
-    checker.result.scope_stack.stack.allocator = checker.result.scope_stack.allocator;
-    array_reserve(*checker.result.scope_stack.stack, 256);
+    checker.ctx.scope_stack.allocator = make_arena(Megabytes(8));
+    checker.ctx.scope_stack.stack.allocator = checker.ctx.scope_stack.allocator;
+    array_reserve(*checker.ctx.scope_stack.stack, 256);
 
     global_scope, global_handle := push_scope(checker, kind = .Global);
     array_reserve(*global_scope.children, 2048);
@@ -869,7 +867,7 @@ find_symbol :: (scope_stack : Scope_Stack, name : string, current_scope : Scope_
 }
 
 find_symbol :: (checker : *Semantic_Checker, name : string, current_scope : Scope_Handle, containing_scope : *Scope_Handle = null) -> *Defined_Symbol {
-    return find_symbol(checker.result.scope_stack, name, current_scope, containing_scope);
+    return find_symbol(checker.ctx.scope_stack, name, current_scope, containing_scope);
 }
 
 find_symbol :: (name : string, checker : *Semantic_Checker, containing_scope : *Scope_Handle = null) -> *Defined_Symbol {
@@ -882,7 +880,7 @@ from_handle :: (variables : []Type_Variable, handle : Type_Variable_Handle) -> *
 }
 
 from_handle :: (checker : *Semantic_Checker, handle : Type_Variable_Handle) -> *Type_Variable {
-    return from_handle(checker.result.type_variables, handle);
+    return from_handle(checker.ctx.type_variables, handle);
 }
 
 proper_type_to_string :: (builder : *String_Builder, variables : []Type_Variable, var : Type_Variable) {
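
Both convenience overloads keep their shape; they just forward to the context-owned storage now, so call sites are untouched (a sketch):

    handle := check_node(checker, node);
    var := from_handle(checker, handle);  // indexes checker.ctx.type_variables
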
@@ -1011,7 +1009,7 @@ declare_struct :: (checker : *Semantic_Checker, node : *AST_Node, name : string)
         symbol.name = name;
         symbol.source_node = node;
         symbol.type_variable = handle;
-        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name, symbol);
+        add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, name, symbol);
     } else {
         symbol_redeclaration(checker, node, find_result);
         return 0;
@@ -1046,7 +1044,7 @@ declare_properties :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Va
     name := ifx node.name.count == 0 then "properties" else node.name;
 
     if node.name.count > 0 {
-        checker.result.property_name = name;
+        checker.ctx.property_name = name;
     }
     type_var := declare_struct(checker, node, name);
     var := from_handle(checker, type_var);
@@ -1063,7 +1061,7 @@ declare_cbuffer :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Varia
     var.type = .CBuffer;
     var.resource_index = checker.current_buffer_index;
     checker.current_buffer_index += 1;
-    array_add(*checker.result.constant_buffers, type_var);
+    array_add(*checker.ctx.constant_buffers, type_var);
     return type_var;
 }
 
@@ -1098,11 +1096,11 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
     name_to_check := get_actual_function_name(node);
 
     if node.vertex_entry_point {
-        checker.result.vertex_entry_point.node = node;
+        checker.ctx.vertex_entry_point.node = node;
     }
 
     if node.pixel_entry_point {
-        checker.result.pixel_entry_point.node = node;
+        checker.ctx.pixel_entry_point.node = node;
     }
     find_result := find_symbol(checker, name_to_check, checker.current_scope);
 
@@ -1120,7 +1118,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
         array_reserve(*symbol.functions, 32);
         array_add(*symbol.functions, function);
 
-        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name_to_check, symbol);
+        add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, name_to_check, symbol);
     } else {
         //@Note(niels): This is some ugly code, but it's probably fine for now.
         field_list := node.children[0];
@@ -1357,7 +1355,7 @@ check_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_
         symbol.name = node.name;
         symbol.source_node = node;
         symbol.type_variable = handle;
-        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, node.name, symbol);
+        add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, node.name, symbol);
     } else {
         symbol_redeclaration(checker, node, find_result);
         return 0;
@@ -1612,7 +1610,7 @@ check_node :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_H
     typename : string;
     variable.type = .Int;
     symbol.type_variable = handle;
-    add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, symbol.name, symbol);
+    add_symbol_to_scope(checker.state, *checker.ctx.scope_stack, checker.current_scope, symbol.name, symbol);
 
     begin_iter := check_node(checker, node.children[0]);
     begin_var := from_handle(checker, begin_iter);
@@ -1807,15 +1805,19 @@ add_builtins_new :: (checker : *Semantic_Checker) {
     float_name := Typenames[Type_Kind.Float];
     int_name := Typenames[Type_Kind.Int];
 
-    arg :: (name : string, kind : Type_Kind) -> Arg {
+    arg :: (name : string = "", kind : Type_Kind) -> Arg {
         return .{ name, Typenames[kind] };
     }
 
-    farg :: (name : string) -> Arg {
+    targ :: (typename : string) -> Arg {
+        return .{ "", typename };
+    }
+
+    farg :: (name : string = "") -> Arg {
         return arg(name, .Float);
     }
 
-    iarg :: (name : string) -> Arg {
+    iarg :: (name : string = "") -> Arg {
         return arg(name, .Int);
     }
 
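
With the defaulted name parameters there is now one helper per call shape used in the constructor list below; the struct literals they expand to follow directly from the bodies above (assuming Typenames maps .Float to "float" and .Int to "int"):

    farg("x")       // .{ "x", "float" }   named float member, for structs
    farg()          // .{ "",  "float" }   anonymous float parameter
    iarg()          // .{ "",  "int" }     anonymous int parameter
    targ("float2")  // .{ "",  "float2" }  anonymous parameter, any typename
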
@@ -1827,7 +1829,7 @@ add_builtins_new :: (checker : *Semantic_Checker) {
     i := 0;
     for x : 0..3 {
         for y : 0..3 {
-            float4x4_members[i] = farg(tprint("m%%", x + 1, y + 1));
+            float4x4_members[i] = farg(tprint("m%1%2", x + 1, y + 1));
             i += 1;
         }
     }
@@ -1842,7 +1844,7 @@ add_builtins_new :: (checker : *Semantic_Checker) {
     i = 0;
     for x : 0..3 {
         for y : 0..3 {
-            int4x4_members[i].name = tprint("m%%", x + 1, y + 1);
+            int4x4_members[i].name = tprint("m%1%2", x + 1, y + 1);
             int4x4_members[i].typename = int_name;
             i += 1;
         }
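
The format string was the real bug in these two loops: in Jai's print formatting %% is an escaped literal percent, so tprint("m%%", x + 1, y + 1) produced "m%" for every member and ignored both arguments. The positional form substitutes them:

    tprint("m%1%2", 1, 1);  // "m11"
    tprint("m%1%2", 4, 4);  // "m44"

so float4x4 and int4x4 get the intended m11..m44 member names instead of sixteen members all named "m%".
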
@@ -1850,9 +1852,27 @@ add_builtins_new :: (checker : *Semantic_Checker) {
 
     int4x4_tv, i4x4h := new_builtin_struct(checker, "int4x4", int4x4_members);
 
-    new_builtin_function(checker, "float2", .[farg("x"), farg("y")], .{ "res", "float2" });
-    new_builtin_function(checker, "float2", .[.{"v", "float2"}], .{ "res", "float2" });
-    new_builtin_function(checker, "float3", .[farg("x"), farg("y"), farg("z")], .{ "res", "float3" });
+    new_builtin_function(checker, "float2", .[farg(), farg()], targ("float2"));
+    new_builtin_function(checker, "float2", .[farg()], targ("float2"));
+    new_builtin_function(checker, "float2", .[targ("float2")], targ("float2"));
+
+    new_builtin_function(checker, "float3", .[farg(), farg(), farg()], targ("float3"));
+    new_builtin_function(checker, "float3", .[targ("float3")], targ("float3"));
+    new_builtin_function(checker, "float3", .[targ("float2"), farg()], targ("float3"));
+    new_builtin_function(checker, "float3", .[farg(), targ("float2")], targ("float3"));
+    new_builtin_function(checker, "float3", .[farg()], targ("float3"));
+
+    new_builtin_function(checker, "float4", .[farg(), farg(), farg(), farg()], targ("float4"));
+    new_builtin_function(checker, "float4", .[targ("float2"), targ("float2")], targ("float4"));
+    new_builtin_function(checker, "float4", .[targ("float2"), farg(), farg()], targ("float4"));
+    new_builtin_function(checker, "float4", .[farg(), targ("float2"), farg()], targ("float4"));
+    new_builtin_function(checker, "float4", .[farg(), farg(), targ("float2")], targ("float4"));
+    new_builtin_function(checker, "float4", .[farg(), targ("float3")], targ("float4"));
+    new_builtin_function(checker, "float4", .[targ("float3"), farg()], targ("float4"));
+    new_builtin_function(checker, "float4", .[targ("float4")], targ("float4"));
+    new_builtin_function(checker, "float4", .[farg()], targ("float4"));
 
     checker.state = .Type_Checking;
 }
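
Together these give every vector type a full constructor set: per-component scalars, a copy constructor, the mixed vector/scalar combinations, and a single-scalar splat. All of the following should now type-check in shader source (the lines mirror the updated test at the bottom of this commit):

    v2 := float2(1.0, 2.0);
    v3 := float3(v2, 3.0);   // float2 + scalar
    v4 := float4(v3, 1.0);   // float3 + scalar
    v4  = float4(v2, v2);    // two float2s
    v4  = float4(2.0);       // single-scalar splat
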
@@ -1883,45 +1903,45 @@ add_builtins :: (checker : *Semantic_Checker) {
 
     checker.state = .Adding_Builtins;
 
-    checker.result.file = make_file_from_string(BUILTIN);
-    checker.result.allocator = make_arena(Megabytes(128));
+    checker.ctx.file = make_file_from_string(BUILTIN);
+    checker.ctx.allocator = make_arena(Megabytes(128));
 
-    prev_file := checker.result.file;
-    prev_root := checker.result.root;
-    prev_tokens := checker.result.tokens;
+    prev_file := checker.ctx.file;
+    prev_root := checker.ctx.root;
+    prev_tokens := checker.ctx.tokens;
 
-    checker.result.root = null;
+    checker.ctx.root = null;
 
     tokens : [..]Token;
     scratch := get_scratch();
     defer scratch_end(scratch);
     tokens.allocator = scratch.allocator;
     array_reserve(*tokens, 1024 * 1024);
-    checker.result.tokens = tokens;
+    checker.ctx.tokens = tokens;
 
-    checker.result.tokens.count = 0;
+    checker.ctx.tokens.count = 0;
 
-    lex(checker.result);
-    parse(checker.result);
-    type_check(checker, checker.result.root);
+    lex(checker.ctx);
+    parse(checker.ctx);
+    type_check(checker, checker.ctx.root);
 
-    for *type_var : checker.result.type_variables {
+    for *type_var : checker.ctx.type_variables {
         type_var.builtin = true;
     }
 
     checker.state = .Type_Checking;
 
-    checker.result.file = prev_file;
-    checker.result.root = prev_root;
-    checker.result.tokens = prev_tokens;
+    checker.ctx.file = prev_file;
+    checker.ctx.root = prev_root;
+    checker.ctx.tokens = prev_tokens;
 }
 
 type_check :: (checker : *Semantic_Checker, root : *AST_Node) {
     traverse(checker, root);
 }
 
-check :: (result : *Compile_Result, allocator : Allocator = temp) {
-    if result.had_error {
+check :: (ctx : *Compiler_Context, allocator : Allocator = temp) {
+    if ctx.had_error {
         return;
     }
 
@@ -1936,16 +1956,16 @@ check :: (result : *Compile_Result, allocator : Allocator = temp) {
     checker.current_buffer_index = 0;
     checker.current_sampler_index = 0;
     checker.current_texture_index = 0;
-    checker.result = result;
+    checker.ctx = ctx;
 
-    init_semantic_checker(*checker, result.root, result.file.path);
+    init_semantic_checker(*checker, ctx.root, ctx.file.path);
 
     add_builtins_new(*checker);
     // add_builtins(*checker);
 
-    type_check(*checker, result.root);
+    type_check(*checker, ctx.root);
 
-    result.had_error |= checker.had_error;
+    ctx.had_error |= checker.had_error;
 }
 
 
@@ -2227,18 +2247,17 @@ pretty_print_symbol_table :: (checker : *Semantic_Checker, allocator : Allocator
     builder : String_Builder;
     init_string_builder(*builder,, allocator);
 
-    pretty_print_scope(xx checker.current_scope, checker.result.scope_stack, checker.result.type_variables, *checker.result.scope_stack.stack[0], *builder);
+    pretty_print_scope(xx checker.current_scope, checker.ctx.scope_stack, checker.ctx.type_variables, *checker.ctx.scope_stack.stack[0], *builder);
 
     return builder_to_string(*builder,, allocator);
 }
 
-pretty_print_symbol_table :: (result : *Compile_Result, allocator : Allocator) -> string {
+pretty_print_symbol_table :: (ctx : *Compiler_Context, allocator : Allocator) -> string {
     builder : String_Builder;
     init_string_builder(*builder,, allocator);
 
     current_scope := cast(Scope_Handle)1;
-    pretty_print_scope(current_scope, result.scope_stack, result.type_variables, *result.scope_stack.stack[0], *builder);
-
+    pretty_print_scope(current_scope, ctx.scope_stack, ctx.type_variables, *ctx.scope_stack.stack[0], *builder);
 
     return builder_to_string(*builder,, allocator);
 }
module.jai (83 changed lines)
@@ -28,10 +28,6 @@ Environment :: struct {
     defines : [..]string;
 }
 
-Shader_Compiler :: struct {
-    environment : Environment;
-}
-
 Field_Kind :: enum {
     Int :: 0;
     Half :: 1;
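
Shader_Compiler only wrapped an Environment, and with compile_file now taking the Compiler_Context directly (see below) the wrapper has no remaining role. Note that compile_file no longer copies an environment in, so a caller that needs defines presumably sets them on the context beforehand (a sketch; the define name is illustrative):

    ctx : Compiler_Context;
    array_add(*ctx.environment.defines, "USE_FOG");
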
@@ -141,7 +137,7 @@ Input_File :: struct {
     path : string;
 }
 
-Compile_Result :: struct {
+Compiler_Context :: struct {
     file : Input_File;
 
     environment : Environment;
@@ -174,14 +170,13 @@ Compile_Result :: struct {
     properties : Properties;
 
     max_constant_buffers :: 16;
-    cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
 
+    cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
 
     had_error : bool;
     messages : [..]Compiler_Message;
 
     allocator : Allocator;
-    // string_allocator : Allocator;
 }
 
 #add_context scratch_allocators : [2]Allocator;
@@ -211,7 +206,7 @@ get_scratch :: (conflict : Allocator = .{}) -> Scratch {
     return scratch_begin(*context.scratch_allocators[0]);
 }
 
-record_error :: (result : *Compile_Result, format : string, args : .. Any) {
+record_error :: (result : *Compiler_Context, format : string, args : .. Any) {
     error : Compiler_Message;
     error.message_kind = .Error;
     error.message = sprint(format, args);
@@ -219,7 +214,7 @@ record_error :: (result : *Compile_Result, format : string, args : .. Any) {
     array_add(*result.messages, error);
 }
 
-make_file :: (result : *Compile_Result, path : string) -> Input_File {
+make_file :: (result : *Compiler_Context, path : string) -> Input_File {
     if !file_exists(path) {
         record_error(result, "Unable to load file: %", path);
         return .{};
@@ -407,18 +402,18 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
 }
 
 type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variable) -> Field {
-    return type_variable_to_field(checker.result.type_variables, checker.result.scope_stack, variable);
+    return type_variable_to_field(checker.ctx.type_variables, checker.ctx.scope_stack, variable);
 }
 
-generate_output_data :: (result : *Compile_Result) {
-    if result.had_error {
+generate_output_data :: (ctx : *Compiler_Context) {
+    if ctx.had_error {
         return;
     }
 
-    if result.vertex_entry_point.node {
-        result.vertex_entry_point.name = result.vertex_entry_point.node.name;
+    if ctx.vertex_entry_point.node {
+        ctx.vertex_entry_point.name = ctx.vertex_entry_point.node.name;
 
-        type_variable := from_handle(result.type_variables, result.vertex_entry_point.node.type_variable);
+        type_variable := from_handle(ctx.type_variables, ctx.vertex_entry_point.node.type_variable);
         assert(type_variable.type == .Function);
 
         node := type_variable.source_node;
@@ -426,23 +421,23 @@ generate_output_data :: (result : *Compile_Result) {
             if node.children[0].kind == .FieldList {
                 field_list := node.children[0];
                 for child : field_list.children {
-                    tv := from_handle(result.type_variables, child.type_variable);
-                    field := type_variable_to_field(result.type_variables, result.scope_stack, tv);
-                    array_add(*result.vertex_entry_point.input, field);
+                    tv := from_handle(ctx.type_variables, child.type_variable);
+                    field := type_variable_to_field(ctx.type_variables, ctx.scope_stack, tv);
+                    array_add(*ctx.vertex_entry_point.input, field);
                 }
             }
         }
     }
 
-    for buffer_variable : result.constant_buffers {
-        variable := from_handle(result.type_variables, buffer_variable);
+    for buffer_variable : ctx.constant_buffers {
+        variable := from_handle(ctx.type_variables, buffer_variable);
 
-        cb := array_add(*result.cbuffers);
+        cb := array_add(*ctx.cbuffers);
 
         for i : 0..variable.children.count - 1 {
             child := variable.children[i];
             field : Property_Field;
-            field.base_field = type_variable_to_field(result.type_variables, result.scope_stack, from_handle(result.type_variables, child));
+            field.base_field = type_variable_to_field(ctx.type_variables, ctx.scope_stack, from_handle(ctx.type_variables, child));
             array_add(*cb.fields, field);
         }
 
@@ -456,28 +451,28 @@ generate_output_data :: (result : *Compile_Result) {
         }
     }
 
-    find_result := find_symbol(*result.scope_stack, result.property_name, xx 1);
+    find_result := find_symbol(*ctx.scope_stack, ctx.property_name, xx 1);
     if find_result {
-        property_variable := from_handle(result.type_variables, find_result.type_variable);
+        property_variable := from_handle(ctx.type_variables, find_result.type_variable);
 
         for i : 0..property_variable.children.count - 1 {
             child := property_variable.children[i];
-            field := type_variable_to_field(result.type_variables, result.scope_stack, from_handle(result.type_variables, child));
+            field := type_variable_to_field(ctx.type_variables, ctx.scope_stack, from_handle(ctx.type_variables, child));
             prop_field : Property_Field;
             prop_field.base_field = field;
-            array_add(*result.properties.fields, prop_field);
+            array_add(*ctx.properties.fields, prop_field);
         }
-        result.properties.buffer_index = property_variable.resource_index;
+        ctx.properties.buffer_index = property_variable.resource_index;
     }
 
 
-    if result.pixel_entry_point.node {
-        result.pixel_entry_point.name = result.pixel_entry_point.node.name;
+    if ctx.pixel_entry_point.node {
+        ctx.pixel_entry_point.name = ctx.pixel_entry_point.node.name;
 
-        type_variable := from_handle(result.type_variables, result.pixel_entry_point.node.type_variable);
+        type_variable := from_handle(ctx.type_variables, ctx.pixel_entry_point.node.type_variable);
         assert(type_variable.type == .Function);
 
-        field := type_variable_to_field(result.type_variables, result.scope_stack, type_variable.return_type_variable);
+        field := type_variable_to_field(ctx.type_variables, ctx.scope_stack, type_variable.return_type_variable);
         for hint : type_variable.source_node.hint_tokens {
             field_hint : Field_Hint;
 
@@ -501,33 +496,25 @@ generate_output_data :: (result : *Compile_Result) {
             array_add(*field.hints, field_hint);
         }
 
-        result.pixel_entry_point.return_value = field;
+        ctx.pixel_entry_point.return_value = field;
     }
 }
 
-compile_file :: (compiler : *Shader_Compiler, path : string, allocator : Allocator = temp) -> Compile_Result {
-    result : Compile_Result;
-
+compile_file :: (ctx : *Compiler_Context, path : string, allocator : Allocator = temp) {
     new_context := context;
     new_context.allocator = allocator;
     push_context new_context {
         init_context_allocators();
         defer clear_context_allocators();
 
-        result.allocator = make_arena(Megabytes(128));
-        result.file = make_file(*result, path);
-        result.environment = compiler.environment;
-
-        lex(*result);
-        parse(*result);
-        check(*result);
-        codegen(*result);
-        generate_output_data(*result);
-
+        ctx.allocator = make_arena(Megabytes(128));
+
+        ctx.file = make_file(ctx, path);
+
+        lex(ctx);
+        parse(ctx);
+        check(ctx);
+        codegen(ctx);
+        generate_output_data(ctx);
     }
-
-    return result;
 }
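
compile_file no longer builds and returns a Compile_Result by value: the caller owns the Compiler_Context, passes it in by pointer, and everything (tokens, messages, entry points, generated output) accumulates on it. A call site under the new API looks roughly like this (a sketch; the path and the error handling are illustrative):

    ctx : Compiler_Context;
    compile_file(*ctx, "shaders/basic.ink");

    if ctx.had_error {
        for ctx.messages {
            print("%\n", it.message);
        }
    }

The final hunk below updates the constructor test shader to exercise the new float2/float3/float4 overloads.
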
@@ -1,7 +1,28 @@
 vertex main :: () {
     v2 : float2 = float2(2.0, 2.0);
+    v2 = float2(2.0);
+    v2 = float2(v2);
 
     v3 : float3 = float3(2.0, 2.0, 2.0);
-    v4 : float4;// = float4(2.0, 2.0, 2.0);
+    v3 = float3(v2, 1.0);
+    v3 = float3(1.0, v2);
+    v3 = float3(1.0);
+    v3 = float3(v3);
+
+    v4 : float4 = float4(2.0, 2.0, 2.0, 2.0);
+    v4 = float4(v4);
+    v4 = float4(v2, v2);
+    v4 = float4(v2, 1.0, 1.0);
+    v4 = float4(1.0, v2, 1.0);
+    v4 = float4(1.0, 1.0, v2);
+    v4 = float4(v3, 2.0);
+    v4 = float4(2.0, v3);
+    v4 = float4(2.0);
+
+
+    v4 = float4(1.0, 1.0, v2);
+    v4 = float4(2.0);
 
     v2.x = 2.0;
     v2.y = 2.0;
 