Compare commits

36 Commits

Author SHA1 Message Date
0c0e31db38 Fix lvalue/rvalue binaries. Fix structured buffer output. 2025-09-29 22:22:00 +02:00
2e23b37405 Finalize struct gen for structured buffers. Rename buffer builtins. 2025-09-29 20:55:27 +02:00
63a68b70b4 More fixes to access and buffer compilation. 2025-09-26 07:01:06 +02:00
6528ca854b Merge branch 'main' of git.nbross.com:nielsbross/Ink-Shader-Language 2025-09-24 14:04:56 +02:00
940b58331d A bunch of array fixes and some buffer stuff that doesn't quite work yet 2025-09-24 14:04:50 +02:00
c26fa892ee Added todos to module 2025-09-18 11:08:52 +00:00
4c84437022 Added comment and todo on resource index in type var. 2025-09-18 11:02:35 +00:00
3fdaebf66d Added todo regarding for loop scoping. 2025-09-18 10:58:44 +00:00
50a404984d Started some fixes for array support. Not entirely there yet. 2025-09-17 21:37:53 +02:00
89904824bb Fixed test to fully deprecate properties. 2025-09-17 12:33:38 +02:00
94daf81a85 Merge branch 'main' of git.nbross.com:nielsbross/Ink-Shader-Language
# Conflicts:
#	AST.jai
#	Check.jai
#	module.jai
2025-09-17 12:32:47 +02:00
607a6a0bed Deprecate properties. Use hinted cbuffers instead. This opens up to use a structured buffer in that way as well if you want instead. 2025-09-17 12:31:37 +02:00
8b2141df16 Fix some if directive stuff. Fix a property output issue. Will be deprecated next commit anyway. 2025-09-17 12:30:36 +02:00
7fefe0ecf6 Ifdefs, moved semantic to check, fixed error reporting for builtins 2025-09-16 11:04:57 +02:00
f99f86bc37 Update readme 2025-09-14 20:21:43 +02:00
d5476b54d7 Fix tests. 2025-09-13 22:02:44 +02:00
622ce388fa Actually fix which allocator we're using in the tests. 2025-09-13 22:01:29 +02:00
b0653b6563 Tracy 2025-09-11 17:29:45 +02:00
d6ea2e4c2f Memory optimization for the general case by reserving significantly fewer tokens in lexer. It was reserving 1 million always! 2025-09-11 12:27:44 +02:00
361a310ed1 Actually pass the allocator in so it's used instead of temp. 2025-09-11 11:33:16 +02:00
78e6d6146e Remove compiler ctx allocator 2025-09-11 11:23:07 +02:00
79ec6cc42f Added the rest of current builtins. Started properly implementing compile tests. 2025-09-11 11:03:02 +02:00
9461fe626f Change result to context for clarity. Fix a bunch of stuff in builtin functions and structs. 2025-09-10 23:21:34 +02:00
ceafd197f5 A bunch of new allocation related stuff. 2025-09-10 06:59:29 +02:00
f4a9592f26 Much better allocation strategy with new ncore arenas. Add more builtin node gen stuff. It's kind of wonky. 2025-09-06 23:30:45 +02:00
9cf51a1534 Broke builtins. 2025-09-06 19:58:46 +02:00
11c936ba7f Started load directive. 2025-09-03 22:31:18 +02:00
4924b01eac Added some initial directive code. Don't quite like the way it's done 2025-09-03 21:05:00 +02:00
603b625e21 Rename of files, improved for handling, add cb hints
- Rename Test to Ink as main file
- Support more errors for for loops.
- Add hints to cbs
2025-09-02 11:55:27 +02:00
9e0728f952 Fixed some error handling and weird consume logic. 2025-09-01 12:58:45 +02:00
94fc3a4dad Added basic for i loops. Missing some breaking tests and more tests. Also want to add for each at some point and it_index. 2025-08-30 22:58:51 +02:00
14f8b20d5f More small changes 2025-08-27 22:12:19 +02:00
4825623c73 Merge branch 'main' of git.nbross.com:nielsbross/Ink-Shader-Language 2025-08-27 21:55:08 +02:00
da87209690 Move compile result stuff out of specific stages. 2025-08-27 21:55:01 +02:00
e0908a67c0 Add hint parsing on constant buffers. Not yet used in output. 2025-08-25 22:08:58 +02:00
ab711b5610 Minor experiment 2025-08-25 21:56:08 +02:00
160 changed files with 6302 additions and 3688 deletions

3
.gitmodules vendored
View File

@@ -1,3 +1,6 @@
[submodule "modules/nbrutil"]
path = modules/ncore
url = git@git.nbross.com:nielsbross/NCore.git
[submodule "modules/tracy"]
path = modules/tracy
url = https://github.com/rluba/jai-tracy.git

154
AST.jai
View File

@@ -8,20 +8,17 @@ AST_Kind :: enum {
Function; Function;
Return; Return;
// @Incomplete(nb): Should these three really be their own block types?
// Maybe they at least shouldn't need to have their own tokens...
Properties;
Meta;
Instance;
//== //==
// Directives
If_Directive;
// Hint; Access;
// Type;
// Operator;
Call; Call;
Struct; Struct;
If; If;
For;
CBuffer; CBuffer;
Buffer;
FieldList; FieldList;
ArgList; ArgList;
Variable; Variable;
@@ -190,7 +187,13 @@ pretty_print_children :: (parent : *AST_Node, indentation : int, builder : *Stri
ind = 0; ind = 0;
} }
// skip := ifx it_index > 0 then false else true; // skip := ifx it_index > 0 then false else true;
if child.kind == .Function {
pretty_print_declaration(child, ind, builder);
} else {
pretty_print_node(child, ind, builder); pretty_print_node(child, ind, builder);
}
if it_index != children.count - 1 { if it_index != children.count - 1 {
if flags & .Separator { if flags & .Separator {
@@ -252,9 +255,15 @@ pretty_print_binary :: (node : *AST_Node, indentation : int, builder : *String_B
if !skip_indent { if !skip_indent {
indent(builder, indentation); indent(builder, indentation);
} }
if node.token.kind == .TOKEN_LEFTBRACKET {
pretty_print_node(node.children[0], 0, builder);
append(builder, "[");
pretty_print_node(node.children[1], 0, builder);
append(builder, "]");
} else {
append(builder, "("); append(builder, "(");
op := node.token; op := node.token;
print_to_builder(builder, op_to_string(op)); print_to_builder(builder, op_to_string(op));
append(builder, " "); append(builder, " ");
@@ -264,6 +273,19 @@ pretty_print_binary :: (node : *AST_Node, indentation : int, builder : *String_B
append(builder, ")"); append(builder, ")");
} }
}
pretty_print_access :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) {
if !skip_indent {
indent(builder, indentation);
}
pretty_print_node(node.children[0], 0, builder);
append(builder, ".");
pretty_print_node(node.children[1], 0, builder);
}
pretty_print_unary :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) { pretty_print_unary :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) {
if !skip_indent { if !skip_indent {
indent(builder, indentation); indent(builder, indentation);
@@ -309,6 +331,26 @@ pretty_print_if :: (node : *AST_Node, indentation : int, builder : *String_Build
append(builder, ")"); append(builder, ")");
} }
pretty_print_for :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) {
if !skip_indent {
indent(builder, indentation);
}
append(builder, "(for ");
loop_iterator := node.token;
print_to_builder(builder, "% : ", loop_iterator.ident_value);
pretty_print_node(node.children[0], 0, builder);
append(builder, "..");
pretty_print_node(node.children[1], 0, builder);
append(builder, "\n");
pretty_print_node(node.children[2], indentation + 4, builder);
append(builder, ")");
}
print_expression_statement :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) { print_expression_statement :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) {
if !skip_indent { if !skip_indent {
indent(builder, indentation); indent(builder, indentation);
@@ -327,6 +369,32 @@ pretty_print_node :: (node : *AST_Node, indentation : int, builder : *String_Bui
case .If; { case .If; {
pretty_print_if(node, indentation, builder, skip_indent); pretty_print_if(node, indentation, builder, skip_indent);
} }
case .If_Directive; {
if !skip_indent {
indent(builder, indentation);
}
append(builder, "(#if ");
condition := node.children[0];
pretty_print_node(condition, 0, builder);
append(builder, "\n");
body := node.children[1];
// indent(builder,indentation + 4);
// append(builder, "(");
pretty_print_node(body, indentation + 4, builder);
// append(builder, ")");
if node.children.count == 3 { //@Note: Else branch
append(builder, "\n");
pretty_print_node(node.children[2], indentation + 4, builder);
}
append(builder, ")");
}
case .For; {
pretty_print_for(node, indentation, builder, skip_indent);
}
case .Struct; case .Struct;
case .ArgList; { case .ArgList; {
pretty_print_arglist(node, indentation + 2, builder, skip_indent); pretty_print_arglist(node, indentation + 2, builder, skip_indent);
@@ -346,6 +414,9 @@ pretty_print_node :: (node : *AST_Node, indentation : int, builder : *String_Bui
case .Binary; { case .Binary; {
pretty_print_binary(node, indentation, builder, skip_indent); pretty_print_binary(node, indentation, builder, skip_indent);
} }
case .Access; {
pretty_print_access(node, indentation, builder, skip_indent);
}
case .Unary; { case .Unary; {
pretty_print_unary(node, indentation, builder, skip_indent); pretty_print_unary(node, indentation, builder, skip_indent);
} }
@@ -410,23 +481,28 @@ pretty_print_declaration :: (declaration : *AST_Node, indentation : int, builder
append(builder, "pixel "); append(builder, "pixel ");
} }
if declaration.kind == .Properties { if declaration.kind == .If_Directive {
append(builder, "properties"); append(builder, "#if ");
if declaration.name.count > 0 {
print_to_builder(builder, " %", declaration.name);
} }
} else if declaration.kind == .Instance {
append(builder, "instance");
} else if declaration.kind == .Meta {
append(builder, "meta");
}
else {
if declaration.kind == .Struct { if declaration.kind == .Struct {
append(builder, "struct "); append(builder, "struct ");
} else if declaration.kind == .CBuffer { } else if declaration.kind == .CBuffer {
append(builder, "constant_buffer "); append(builder, "constant_buffer ");
} else if declaration.kind == .Buffer {
append(builder, "buffer ");
} }
print_to_builder(builder, "%", declaration.name); print_to_builder(builder, "%", declaration.name);
if declaration.kind == .CBuffer || declaration.kind == .Buffer{
for hint : declaration.hint_tokens {
if hint.string_value.count > 0 {
print_to_builder(builder, " (@%)", hint.string_value);
}
}
// if declaration.kind != .If_Directive {
// print_to_builder(builder, "%", declaration.name);
// }
} }
if declaration.kind == .Function && declaration.token.kind == .TOKEN_IDENTIFIER{ if declaration.kind == .Function && declaration.token.kind == .TOKEN_IDENTIFIER{
@@ -439,24 +515,32 @@ pretty_print_declaration :: (declaration : *AST_Node, indentation : int, builder
} }
if declaration.children.count > 0 { if declaration.children.count > 0 {
// print_to_builder(builder, "\n"); if declaration.kind == .If_Directive {
// if declaration.kind == .Function { pretty_print_node(declaration.children[0], 0, builder);
// field_list := declaration.children[0]; append(builder, "\n");
// pretty_print_fieldlist(field_list, indentation + 1, builder); pretty_print_node(declaration.children[1], indentation + 5, builder);
// append(builder, "\n");
// if declaration.children.count > 1 {
// body := declaration.children[1];
// pretty_print_node(body, indentation + 1, builder, true);
// }
// } else if declaration.kind == .Struct {
// pretty_print_node(declaration.children[0], indentation + 1, builder);
// } else {
// pretty_print_node(declaration.children[0], indentation + 1, builder);
// }
if declaration.children.count > 2 {
append(builder, "\n");
if declaration.children[2].kind == .If_Directive {
pretty_print_declaration(declaration.children[2], indentation + 5, builder);
} else {
pretty_print_node(declaration.children[2], indentation + 5, builder);
}
}
} else {
print_to_builder(builder, "\n"); print_to_builder(builder, "\n");
pretty_print_children(declaration, indentation + 1, builder, flags = .NewLine);
flags := Children_Print_Flags.NewLine;
if declaration.parent && declaration.parent.parent {
if declaration.parent.parent.kind == .If_Directive {
indent(builder, indentation - 1); //@Note: Hack the indent for now... Wow this is stupid, but it works!
}
}
pretty_print_children(declaration, indentation + 1, builder, flags = flags);
}
} }
append(builder, ")"); append(builder, ")");

2688
Check.jai Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -5,36 +5,24 @@
///////////////////////////////////// /////////////////////////////////////
//~ nbr: Codegen TODOs //~ nbr: Codegen TODOs
// //
// [ ] Prefix output of property values with __PROPERTIES so we don't get name clashes
Output_Language :: enum { Output_Language :: enum {
HLSL; HLSL;
GLSL; // @Incomplete GLSL; // @Incomplete
MLSL; // @Incomplete MLSL; // @Incomplete
// SPIRV; // @Incomplete: Should we do this?
} }
Codegen_State :: struct { Codegen_State :: struct {
path : string; path : string;
scope_stack : Scope_Stack;
current_scope : Scope_Handle; current_scope : Scope_Handle;
type_variables : []Type_Variable;
root : *AST_Node;
output_language : Output_Language; output_language : Output_Language;
builder : String_Builder; builder : String_Builder;
result : Codegen_Result; ctx : *Compiler_Context;
}
Codegen_Result :: struct {
messages : [..]Compiler_Message;
had_error : bool;
result_text : string; // @Incomplete(nb): Result for now, should likely be far more sophisticated.
} }
Reserved_HLSL_Words :: string.[ Reserved_HLSL_Words :: string.[
@@ -56,10 +44,7 @@ Reserved_GLSL_Words :: string.[
"" ""
]; ];
init_codegen_state :: (state : *Codegen_State, file : *Compiled_File, output_language : Output_Language) { init_codegen_state :: (state : *Codegen_State, ctx : *Compiler_Context, output_language : Output_Language) {
state.root = file.ast_root;
state.scope_stack = file.scope_stack;
state.type_variables = file.type_variables;
state.current_scope = cast(Scope_Handle)1; state.current_scope = cast(Scope_Handle)1;
state.output_language = output_language; state.output_language = output_language;
init_string_builder(*state.builder); init_string_builder(*state.builder);
@@ -69,7 +54,11 @@ indent :: (state : *Codegen_State, indentation : int) {
for 1..indentation append(*state.builder, " "); for 1..indentation append(*state.builder, " ");
} }
hlsl_type_to_string :: (type_variable : Type_Variable) -> string { hlsl_type_to_string :: (variables : []Type_Variable, type_handle : Type_Variable_Handle) -> string {
return hlsl_type_to_string(variables, from_handle(variables, type_handle));
}
hlsl_type_to_string :: (variables : []Type_Variable, type_variable : Type_Variable) -> string {
if type_variable.type == { if type_variable.type == {
case .Invalid; case .Invalid;
return "{{invalid}}"; return "{{invalid}}";
@@ -98,28 +87,21 @@ hlsl_type_to_string :: (type_variable : Type_Variable) -> string {
return type_variable.typename; return type_variable.typename;
} }
case .Array; case .Array;
return "array"; return hlsl_type_to_string(variables, type_variable.element_type);
} }
return ""; return "";
} }
emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) { emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
find_result := find_symbol(state.scope_stack, node.name, state.current_scope); find_result := find_symbol(state.ctx.scope_stack, node.name, state.current_scope);
field := from_handle(state.type_variables, find_result.type_variable); field := from_handle(state.ctx.type_variables, find_result.type_variable);
indent(state, indentation); indent(state, indentation);
print_to_builder(*state.builder, "% ", hlsl_type_to_string(field)); print_to_builder(*state.builder, "% ", hlsl_type_to_string(state.ctx.type_variables, field));
if field.struct_field_parent {
parent_tv := from_handle(state.type_variables, field.struct_field_parent.type_variable);
if parent_tv.typename == "properties" {
append(*state.builder, "__PROPERTIES__");
}
}
print_to_builder(*state.builder, "%", node.name); print_to_builder(*state.builder, "%", node.name);
if field.type == .Sampler { if field.type == .Sampler {
@@ -133,41 +115,51 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
if node.children.count == 1 { if node.children.count == 1 {
child := node.children[0]; child := node.children[0];
if field.type == .Array {
append(*state.builder, "[");
emit_node(state, child, 0);
append(*state.builder, "]");
} else {
print_to_builder(*state.builder, " = "); print_to_builder(*state.builder, " = ");
emit_node(state, child, 0); emit_node(state, child, 0);
}
} }
if node.parent.kind == .Block { if node.parent.kind == .Block {
append(*state.builder, ";"); append(*state.builder, ";");
} }
for i :0..field.children.count - 1 { for i :0..field.children.count - 1 {
child := from_handle(state.type_variables, field.children[i]); child := from_handle(state.ctx.type_variables, field.children[i]);
emit_node(state, child.source_node, 0); emit_node(state, child.source_node, 0);
} }
for hint : node.hint_tokens { for hint : node.hint_tokens {
if hint.ident_value == "position" { if lookup_hint(hint.ident_value) == .Position {
// @Incomplete(nb): Should be a lookup table somewhere
append(*state.builder, " : POSITION"); append(*state.builder, " : POSITION");
} else if hint.ident_value == "uv" { } else if lookup_hint(hint.ident_value) == .UV {
append(*state.builder, " : TEXCOORD0"); append(*state.builder, " : TEXCOORD0");
} else if hint.ident_value == "outposition" { } else if lookup_hint(hint.ident_value) == .Output_Position {
append(*state.builder, " : SV_POSITION"); append(*state.builder, " : SV_POSITION");
} }
} }
} }
emit_block :: (state : *Codegen_State, node : *AST_Node, indentation : int) { emit_block :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
previous_scope := state.current_scope;
for statement : node.children { for statement : node.children {
if statement.type_variable {
state.current_scope = from_handle(state.ctx.type_variables, statement.type_variable).scope;
}
emit_node(state, statement, indentation); emit_node(state, statement, indentation);
if it_index < node.children.count { if it_index < node.children.count {
append(*state.builder, "\n"); append(*state.builder, "\n");
} }
} }
state.current_scope = previous_scope;
} }
emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) { emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
@@ -229,59 +221,9 @@ emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
append(*state.builder, ")"); append(*state.builder, ")");
} }
emit_properties :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
find_result := find_symbol(state.scope_stack, ifx node.name.count > 0 then node.name else "properties", state.current_scope);
if !find_result {
message : Compiler_Message;
message.message_kind = .Internal_Error;
message.path = state.path;
message.message = "Attempting to generate undeclared properties buffer. This should never happen at this stage.";
array_add(*state.result.messages, message);
}
assert(find_result != null, "Attempting to generate undeclared properties buffer. This should never happen at this stage.");
variable := from_handle(state.type_variables, find_result.type_variable);
print_to_builder(*state.builder, "cbuffer __PROPERTIES : register(b%) \n{\n", variable.resource_index);
previous_scope := state.current_scope;
state.current_scope = variable.scope;
resources : Static_Array(*AST_Node, 8);
for child : node.children {
if child.kind == .FieldList {
for field : child.children {
tv := from_handle(state.type_variables, field.type_variable);
if tv.type == .Sampler || tv.type == .Texture2D {
array_add(*resources, field);
continue;
}
emit_node(state, field, 1);
append(*state.builder, ";\n");
}
}
}
append(*state.builder, "}\n\n");
for i : 0..resources.count - 1 {
resource := resources[i];
emit_node(state, resource, 0);
append(*state.builder, ";\n");
}
append(*state.builder, "\n");
state.current_scope = previous_scope;
}
emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, emit_body := true) { emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, emit_body := true) {
name := get_actual_function_name(node); name := get_actual_function_name(node);
find_result := find_symbol(state.scope_stack, name, state.current_scope); find_result := find_symbol(state.ctx.scope_stack, name, state.current_scope);
assert(find_result != null, "Attempting to generate undeclared function. This should never happen at this stage."); assert(find_result != null, "Attempting to generate undeclared function. This should never happen at this stage.");
if !find_result { if !find_result {
@@ -289,17 +231,17 @@ emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, e
message.message_kind = .Internal_Error; message.message_kind = .Internal_Error;
message.path = state.path; message.path = state.path;
message.message = "Attempting to generate undeclared function. This should never happen at this stage."; message.message = "Attempting to generate undeclared function. This should never happen at this stage.";
array_add(*state.result.messages, message); array_add(*state.ctx.messages, message);
} }
for func : find_result.functions { for func : find_result.functions {
function_variable := from_handle(state.type_variables, func.type_variable); function_variable := from_handle(state.ctx.type_variables, func.type_variable);
indent(state, indentation); indent(state, indentation);
if function_variable.return_type_variable { if function_variable.return_type_variable {
return_variable := from_handle(state.type_variables, function_variable.return_type_variable); return_variable := from_handle(state.ctx.type_variables, function_variable.return_type_variable);
print_to_builder(*state.builder, "% ", hlsl_type_to_string(return_variable)); print_to_builder(*state.builder, "% ", hlsl_type_to_string(state.ctx.type_variables, return_variable));
} else { } else {
append(*state.builder, "void "); append(*state.builder, "void ");
} }
@@ -424,9 +366,6 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
} }
case .Float; { case .Float; {
print_to_builder(*state.builder, "%f", formatFloat(node.float_value, zero_removal=.ONE_ZERO_AFTER_DECIMAL)); print_to_builder(*state.builder, "%f", formatFloat(node.float_value, zero_removal=.ONE_ZERO_AFTER_DECIMAL));
}
case .Properties; {
} }
case .Field; { case .Field; {
emit_field(state, node, indentation); emit_field(state, node, indentation);
@@ -438,46 +377,57 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
case .Variable; { case .Variable; {
indent(*state.builder, indentation); indent(*state.builder, indentation);
type_var := from_handle(state.type_variables, node.type_variable); type_var := from_handle(state.ctx.type_variables, node.type_variable);
is_properties := type_var.typename == "properties";
if !is_properties {
if type_var.struct_field_parent {
parent_tv := from_handle(state.type_variables, type_var.struct_field_parent.type_variable);
if parent_tv.typename == "properties" {
append(*state.builder, "__PROPERTIES__");
}
}
print_to_builder(*state.builder, "%", node.name); print_to_builder(*state.builder, "%", node.name);
}
if node.children.count > 0 { if node.children.count > 0 {
if !is_properties {
append(*state.builder, "."); append(*state.builder, ".");
}
emit_node(state, node.children[0], 0); emit_node(state, node.children[0], 0);
} }
} }
case .Access; {
indent(*state.builder, indentation);
lhs := node.children[0];
rhs := node.children[1];
emit_node(state, lhs, 0);
print_to_builder(*state.builder, "%.", node.name);
emit_node(state, rhs, 0);
}
case .Binary; { case .Binary; {
indent(*state.builder, indentation); indent(*state.builder, indentation);
if node.token.kind != .TOKEN_ASSIGN { if node.token.kind != .TOKEN_ASSIGN && node.token.kind != .TOKEN_LEFTBRACKET {
if (node.parent.kind == .Binary && node.parent.token.kind != .TOKEN_ASSIGN) || node.parent.kind == .Access {
append(*state.builder, "("); append(*state.builder, "(");
} }
}
lhs := node.children[0]; lhs := node.children[0];
rhs := node.children[1]; rhs := node.children[1];
emit_node(state, lhs, 0);
if node.token.kind == .TOKEN_LEFTBRACKET {
emit_node(state, lhs, 0);
append(*state.builder, "[");
emit_node(state, rhs, 0);
append(*state.builder, "]");
} else {
emit_node(state, lhs, 0);
append(*state.builder, " "); append(*state.builder, " ");
emit_operator(state, node.token.kind); emit_operator(state, node.token.kind);
append(*state.builder, " "); append(*state.builder, " ");
emit_node(state, rhs, 0); emit_node(state, rhs, 0);
if node.token.kind != .TOKEN_ASSIGN { }
if node.token.kind != .TOKEN_ASSIGN && node.token.kind != .TOKEN_LEFTBRACKET {
if (node.parent.kind == .Binary && node.parent.token.kind != .TOKEN_ASSIGN) || node.parent.kind == .Access {
append(*state.builder, ")"); append(*state.builder, ")");
} }
} }
}
case .Unary; { case .Unary; {
indent(*state.builder, indentation); indent(*state.builder, indentation);
@@ -497,6 +447,26 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_node(state, node.children[0], 0); emit_node(state, node.children[0], 0);
append(*state.builder, ";"); append(*state.builder, ";");
} }
case .For; {
if node.parent.kind != .For {
indent(*state.builder, indentation);
}
append(*state.builder, "for ");
loop_ident := node.token.ident_value;
begin_val := node.children[0].integer_value;
end_val := node.children[1].integer_value;
print_to_builder(*state.builder, "(int % = %; % < %; %++)\n", loop_ident, begin_val, loop_ident, end_val, loop_ident);
indent(*state.builder, indentation);
append(*state.builder, "{\n");
emit_block(state, node.children[2], indentation + 1);
indent(*state.builder, indentation);
append(*state.builder, "}\n");
}
case .If; { case .If; {
if node.parent.kind != .If { if node.parent.kind != .If {
indent(*state.builder, indentation); indent(*state.builder, indentation);
@@ -505,7 +475,9 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
append(*state.builder, "if "); append(*state.builder, "if ");
cond := node.children[0]; cond := node.children[0];
append(*state.builder, "(");
emit_node(state, cond, 0); emit_node(state, cond, 0);
append(*state.builder, ")");
body := node.children[1]; body := node.children[1];
append(*state.builder, "\n"); append(*state.builder, "\n");
@@ -552,11 +524,16 @@ emit_field_list :: (state : *Codegen_State, field_list : *AST_Node, indentation
} }
} }
emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) { emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int, name : string = "") {
if name.count > 0 {
print_to_builder(*state.builder, "struct %", name);
} else {
print_to_builder(*state.builder, "struct %", node.name); print_to_builder(*state.builder, "struct %", node.name);
}
current_scope := state.current_scope; current_scope := state.current_scope;
state.current_scope = from_handle(state.type_variables, node.type_variable).scope; state.current_scope = from_handle(state.ctx.type_variables, node.type_variable).scope;
field_list := node.children[0]; field_list := node.children[0];
@@ -573,11 +550,11 @@ emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
} }
emit_cbuffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) { emit_cbuffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
variable := from_handle(state.type_variables, node.type_variable); variable := from_handle(state.ctx.type_variables, node.type_variable);
print_to_builder(*state.builder, "cbuffer % : register(b%)", variable.name, variable.resource_index); print_to_builder(*state.builder, "cbuffer % : register(b%)", variable.name, variable.resource_index);
current_scope := state.current_scope; current_scope := state.current_scope;
state.current_scope = from_handle(state.type_variables, node.type_variable).scope; state.current_scope = from_handle(state.ctx.type_variables, node.type_variable).scope;
field_list := node.children[0]; field_list := node.children[0];
@@ -593,43 +570,63 @@ emit_cbuffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
state.current_scope = current_scope; state.current_scope = current_scope;
} }
emit_buffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
variable := from_handle(state.ctx.type_variables, node.type_variable);
element := from_handle(state.ctx.type_variables, variable.element_type);
emit_struct(state, node, indentation, element.typename);
print_to_builder(*state.builder, "StructuredBuffer<%> % : register(t%);\n\n", element.typename, variable.name, variable.resource_index);
}
emit_declaration :: (state : *Codegen_State, node : *AST_Node) { emit_declaration :: (state : *Codegen_State, node : *AST_Node) {
if node.kind == { if node.kind == {
case .Function; { case .Function; {
emit_function(state, node, 0); emit_function(state, node, 0);
} }
case .Properties; {
emit_properties(state, node, 0);
}
case .CBuffer; { case .CBuffer; {
emit_cbuffer(state, node, 0); emit_cbuffer(state, node, 0);
} }
case .Buffer; {
emit_buffer(state, node, 0);
}
case .Struct; { case .Struct; {
emit_struct(state, node, 0); emit_struct(state, node, 0);
} }
} }
} }
codegen :: (result : *Compile_Result) { codegen :: (result : *Compiler_Context, allocator := temp) {
codegen(result, .HLSL);
}
codegen :: (result : *Compiler_Context, output_language : Output_Language, allocator := temp) {
if result.had_error { if result.had_error {
return; return;
} }
for *file : result.files { new_context := context;
new_context.allocator = allocator;
push_context new_context {
init_context_allocators();
defer clear_context_allocators();
state : Codegen_State; state : Codegen_State;
init_codegen_state(*state, file, .HLSL); state.ctx = result;
state.current_scope = cast(Scope_Handle)1;
state.output_language = output_language;
init_string_builder(*state.builder);
codegen_result := codegen(*state); codegen(*state);
file.codegen_result_text = copy_string(codegen_result.result_text);
} }
} }
codegen :: (state : *Codegen_State) -> Codegen_Result { #scope_file
codegen :: (state : *Codegen_State) {
found_function : bool = false; found_function : bool = false;
// found_struct : bool = false; // found_struct : bool = false;
// for variable : state.type_variables { // for variable : state.ctx.type_variables {
// if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin { // if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
// if variable.source_node.kind == .Properties continue; // if variable.source_node.kind == .Properties continue;
// if variable.source_node.kind == .Meta continue; // if variable.source_node.kind == .Meta continue;
@@ -642,7 +639,7 @@ codegen :: (state : *Codegen_State) -> Codegen_Result {
// append(*state.builder, "\n"); // append(*state.builder, "\n");
// } // }
for variable : state.type_variables { for variable : state.ctx.type_variables {
if variable.type == .Function && !variable.builtin if variable.type == .Function && !variable.builtin
&& !variable.source_node.vertex_entry_point && !variable.source_node.pixel_entry_point { && !variable.source_node.vertex_entry_point && !variable.source_node.pixel_entry_point {
emit_function(state, variable.source_node, 0, false); emit_function(state, variable.source_node, 0, false);
@@ -653,22 +650,14 @@ codegen :: (state : *Codegen_State) -> Codegen_Result {
append(*state.builder, "\n"); append(*state.builder, "\n");
} }
for declaration : state.root.children { for declaration : state.ctx.root.children {
if declaration.foreign_declaration { if declaration.foreign_declaration {
continue; continue;
} }
emit_declaration(state, declaration); emit_declaration(state, declaration);
} }
state.result.result_text = builder_to_string(*state.builder); state.ctx.codegen_result_text = builder_to_string(*state.builder);
return state.result;
}
codegen :: (file : *Compiled_File, output_language : Output_Language) -> Codegen_Result {
codegen_state : Codegen_State;
init_codegen_state(*codegen_state, file, output_language);
return codegen(*codegen_state);
} }
#scope_module #scope_module

View File

@@ -102,20 +102,20 @@ copy_messages :: (source : []Compiler_Message, dest : *[..]Compiler_Message) {
} }
} }
report_messages :: (messages : []Compiler_Message) -> string { report_messages :: (ctx : *Compiler_Context, messages : []Compiler_Message) -> string {
builder : String_Builder; builder : String_Builder;
init_string_builder(*builder); init_string_builder(*builder);
for message : messages { for message : messages {
report_message(*builder, message); report_message(ctx, *builder, message);
} }
return builder_to_string(*builder); return builder_to_string(*builder);
} }
report_message :: (builder : *String_Builder, message : Compiler_Message) { report_message :: (ctx : *Compiler_Context, builder : *String_Builder, message : Compiler_Message) {
report_message(builder, message.path, message.message, message.source_locations, message.message_kind, message.report_source_location); report_message(ctx, builder, message.path, message.message, message.source_locations, message.message_kind, message.report_source_location);
} }
report_message :: (builder : *String_Builder, path : string, message : string, source_locations : []Source_Range, kind : Message_Kind, report_source_location : bool = false) { report_message :: (ctx : *Compiler_Context, builder : *String_Builder, path : string, message : string, source_locations : []Source_Range, kind : Message_Kind, report_source_location : bool = false) {
append(builder, "\x1b[1;37m"); append(builder, "\x1b[1;37m");
if path.count > 0 { if path.count > 0 {
print_to_builder(builder, "%:", path); print_to_builder(builder, "%:", path);
@@ -140,7 +140,7 @@ report_message :: (builder : *String_Builder, path : string, message : string, s
if report_source_location { if report_source_location {
for location : source_locations { for location : source_locations {
append(builder, "\t"); append(builder, "\t");
print_from_source_location(builder, location); print_from_source_location(ctx, builder, location);
append(builder, "\n\t"); append(builder, "\n\t");
begin := location.begin; begin := location.begin;

768
Ink.jai Normal file
View File

@@ -0,0 +1,768 @@
/////////////////////////////////////
/*~ nbr: General improvements
- [x] Print out all failed tests in a list at the end
- [x] Use new compiler API with Compile_Result and Compiled_File instead
- [ ] Use ANSI escape color codes for errors
- [ ] Print golden file as green and new output as red
- [ ] Rename to Ink.jai
- [ ] Add -test option. -test does the same as test.exe used to do
- [ ] Add -fuzz option to run fuzzer (add args later)
- [ ] Add -output option to output the compiled file. Issue with this is the generated data can't be output like that. Would require serialization.
*/
#import "Basic";
#import "File";
#import "String";
#import "File_Utilities";
#import "Print_Color";
#load "module.jai";
// File extensions and folder names used by the golden-file test harness.
GOLDEN_EXTENSION :: "golden";    // extension for expected-output ("golden") files
LEXER_FOLDER :: "lex";           // per-stage golden output subfolders under TESTS_FOLDER
PARSER_FOLDER :: "parse";
CODEGEN_FOLDER :: "codegen";
COMPILED_FOLDER :: "compiled";
CHECK_FOLDER :: "check";
TESTS_FOLDER :: "test";
SHADER_EXTENSION :: "ink";       // extension of shader source files under test
SUITE_EXTENSION :: "suite";      // extension of suite description files
// Bitmask of compiler stages a test case should exercise.
Stage_Flags :: enum_flags u16 {
    Lexer :: 0x1;
    Parser :: 0x2;
    Check :: 0x4;
    Codegen :: 0x8;
    Compile :: 0x10;
}
// Where a test's produced output goes instead of / in addition to comparison.
Output_Type :: enum_flags u16 {
    Golden :: 0x1;  // write the produced output as the new golden file
    StdOut :: 0x2;  // print the produced output to stdout instead of comparing
}
// Outcome of running one stage of one test case.
Result_Type :: enum {
    File_Read_Failed;       // the input file could not be read
    Golden_File_Not_Found;  // no golden file exists to compare against
    StdOut;                 // output was printed instead of compared
    Golden_Output;          // a new golden file was written
    Passed;
    Failed;
}
// One stage-result for one test case.
Result :: struct {
    type : Result_Type;    // outcome classification
    path : string;         // path of the tested file
    stage : Stage_Flags;   // stage this result belongs to (not set by every code path)
    golden_path : string;  // set when a new golden file was written
    info_text : string;    // diagnostics or captured output text
}
// A single file to test plus the stages to run on it.
Test_Case :: struct {
    path : string;
    stage_flags : Stage_Flags;
}
// A named collection of test cases and the results accumulated while running them.
Test_Suite :: struct {
    name : string;
    test_cases : [..]Test_Case;
    results : [..]Result;
}
// Builds the golden-file path for a given source file and stage:
// the source directory gains a stage subfolder (created on demand) and the
// file name's extension is swapped for GOLDEN_EXTENSION.
// Returns a path allocated with path_to_string's default allocator.
get_golden_path :: (file_path : string, stage : Stage_Flags) -> string {
    sc := get_scratch();
    defer scratch_end(sc);
    path := parse_path(file_path,, sc.allocator);
    file_without_extension := split(path.words[path.words.count - 1], ".",, sc.allocator);
    // Drop the file name; the stage folder and golden file name are re-appended below.
    path.words.count -= 1;
    path.words.allocator = sc.allocator;
    stage_folder : string;
    if stage == {
        case .Lexer;   stage_folder = LEXER_FOLDER;
        case .Parser;  stage_folder = PARSER_FOLDER;
        case .Check;   stage_folder = CHECK_FOLDER;
        case .Codegen; stage_folder = CODEGEN_FOLDER;
        case .Compile; stage_folder = COMPILED_FOLDER;
    }
    if stage_folder.count > 0 {
        // Ensure the per-stage output directory exists before any write happens.
        make_directory_if_it_does_not_exist(tprint("%/%", TESTS_FOLDER, stage_folder));
        array_add(*path.words, stage_folder);
    }
    builder : String_Builder;
    // Capacity hint: base name + dot + golden extension.
    // (Previously this used the split-array count and left an unused
    // `final_path_length` local and a dead `builder.allocator = temp`.)
    init_string_builder(*builder, file_without_extension[0].count + GOLDEN_EXTENSION.count + 1);
    builder.allocator = sc.allocator;
    append(*builder, file_without_extension[0]);
    append(*builder, ".");
    append(*builder, GOLDEN_EXTENSION);
    golden_path := builder_to_string(*builder,, sc.allocator);
    array_add(*path.words, golden_path);
    return path_to_string(path);
}
// Either (re)writes the golden file (Golden mode) or compares `comparison_text`
// against its contents, recording the outcome in `result`.
do_golden_comparison :: (golden_path : string, comparison_text : string, result : *Result, output_type : Output_Type) {
    sc := get_scratch();
    defer scratch_end(sc);
    // Golden-output mode: replace the golden file with the fresh output.
    if output_type & .Golden {
        write_entire_file(golden_path, comparison_text);
        result.golden_path = copy_string(golden_path);
        result.type = .Golden_Output;
        return;
    }
    // Comparison mode: the golden file must already exist.
    if !file_exists(golden_path) {
        result.info_text = tprint("Golden file % does not exist. Please run with -output-as-golden at least once.\n", golden_path);
        result.type = .Golden_File_Not_Found;
        return;
    }
    golden_text, read_ok := read_entire_file(golden_path,, sc.allocator);
    if !read_ok {
        result.info_text = tprint("Unable to open golden file %\n", golden_path);
        result.type = .Golden_File_Not_Found;
        return;
    }
    // Normalize line endings so Windows and Unix checkouts compare equal.
    actual   := replace(comparison_text, "\r\n", "\n",, sc.allocator);
    expected := replace(golden_text, "\r\n", "\n",, sc.allocator);
    if compare(actual, expected) == 0 {
        result.type = .Passed;
    } else {
        result.type = .Failed;
        result.info_text = tprint("Golden file:\n%\n===============\n%", expected, actual);
    }
}
// Runs the full front end (lex/parse/check) on a fresh context, then the codegen
// stage test. Any front-end error fails the test immediately.
run_codegen_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    result : Result;
    result.path = file_path;
    result.stage = .Codegen;  // tag the stage so failure reporting names it (matches run_lexer_test)
    lex(ctx, context.allocator);
    parse(ctx, context.allocator);
    check(ctx, context.allocator);
    if ctx.had_error {
        result.type = .Failed;
        return result;
    }
    result = run_codegen_test(ctx, output_type);
    return result;
}
// Runs codegen on an already-checked context and golden-compares (or prints)
// the generated text.
run_codegen_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    result : Result;
    result.path = ctx.file.path;
    result.stage = .Codegen;  // tag the stage so failure reporting names it (matches run_lexer_test)
    result_text : string;
    codegen(ctx, context.allocator);
    if ctx.had_error {
        result.type = .Failed;
        result_text = report_messages(ctx, ctx.messages);
        return result;
    }
    result_text = ctx.codegen_result_text;
    if output_type & .StdOut {
        result.info_text = result_text;
        result.type = .StdOut;
        return result;
    }
    golden_path := get_golden_path(ctx.file.path, .Codegen);
    do_golden_comparison(golden_path, result_text, *result, output_type);
    return result;
}
// Compiles a file end-to-end and summarizes entry points and buffer layouts,
// then golden-compares (or prints) the summary. Also returns the context so
// callers can inspect the compiled artifacts.
run_compile_test :: (path : string, output_type : Output_Type = 0) -> Result, Compiler_Context {
    ctx : Compiler_Context;
    result : Result;
    result.path = path;
    result.stage = .Compile;  // tag the stage so failure reporting names it
    compile_file(*ctx, path, context.allocator);
    if ctx.had_error {
        result.type = .Failed;
        result.info_text = tprint("Failed compiling: %\n", path);
    } else {
        sc := get_scratch();
        defer scratch_end(sc);
        sb : String_Builder;
        init_string_builder(*sb,, sc.allocator);
        if ctx.vertex_entry_point.name.count > 0 {
            print_to_builder(*sb, "[vertex entry point] - %\n", ctx.vertex_entry_point.name);
        }
        if ctx.pixel_entry_point.name.count > 0 {
            print_to_builder(*sb, "[pixel entry point] - %\n", ctx.pixel_entry_point.name);
        }
        for buf : ctx.buffers {
            if buf.kind == {
                case .Constant; {
                    print_to_builder(*sb, "[constant_buffer] - % - %", buf.name, buf.buffer_index);
                }
                case .Structured; {
                    print_to_builder(*sb, "[buffer] - % - %", buf.name, buf.buffer_index);
                }
            }
            // Hints, fields and the trailing newline apply to every buffer kind.
            // (Previously these statements sat inside the .Structured case, so
            // constant buffers were printed without hints, fields, or a newline.)
            if buf.hints.count > 0 {
                for hint : buf.hints {
                    print_to_builder(*sb, " (@%)", hint.custom_hint_name);
                }
            }
            append(*sb, "\n");
            indent(*sb, 1);
            for field : buf.fields {
                append(*sb, "[field] - ");
                pretty_print_field(*sb, *field);
                append(*sb, "\n");
                indent(*sb, 1);
            }
        }
        result.info_text = builder_to_string(*sb);
    }
    if output_type & .StdOut {
        result.type = .StdOut;
        return result, ctx;
    }
    golden_path := get_golden_path(ctx.file.path, .Compile);
    do_golden_comparison(golden_path, result.info_text, *result, output_type);
    return result, ctx;
}
// Lexes the file and golden-compares (or prints) the pretty-printed token stream.
run_lexer_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    result : Result;
    result.path = file_path;
    result.stage = .Lexer;
    result_text : string;
    // NOTE(review): lex() is called with its default allocator here, while
    // run_codegen_test/run_check_test pass context.allocator explicitly — confirm intentional.
    lex(ctx);
    if ctx.had_error {
        result.type = .Failed;
        result_text = report_messages(ctx, ctx.messages);
    } else {
        result_text = pretty_print_tokens(ctx.tokens, context.allocator);
    }
    // In stdout mode the text is delivered in the result instead of compared.
    if output_type & .StdOut {
        result.info_text = result_text;
        result.type = .StdOut;
        return result;
    }
    golden_path := get_golden_path(file_path, .Lexer);
    do_golden_comparison(golden_path, result_text, *result, output_type);
    return result;
}
// Lexes first, then runs the parser stage test on the resulting tokens.
run_parser_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    result : Result;
    result.path = file_path;
    result.stage = .Parser;
    lex(ctx);
    if ctx.had_error {
        // A lexing failure means the parser cannot run; report it as a failure,
        // consistent with the other stage wrappers. (This previously reported .Passed.)
        result.type = .Failed;
        return result;
    }
    result = run_parser_test(ctx, output_type);
    return result;
}
// Parses an already-lexed context and golden-compares (or prints) the
// pretty-printed AST.
run_parser_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    parse(ctx, context.allocator);
    result : Result;
    result.path = ctx.file.path;
    result.stage = .Parser;  // tag the stage so failure reporting names it (matches run_lexer_test)
    result_text : string;
    if ctx.had_error {
        result.type = .Failed;
        result_text = report_messages(ctx, ctx.messages);
    } else {
        result_text = pretty_print_ast(ctx.root, context.allocator);
    }
    if output_type & .StdOut {
        result.info_text = result_text;
        result.type = .StdOut;
        return result;
    }
    golden_path := get_golden_path(ctx.file.path, .Parser);
    do_golden_comparison(golden_path, result_text, *result, output_type);
    return result;
}
// Type-checks an already-parsed context and golden-compares (or prints) the
// pretty-printed symbol table.
run_check_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    result : Result;
    result.path = ctx.file.path;
    result.stage = .Check;  // tag the stage so failure reporting names it (matches run_lexer_test)
    result_text : string;
    check(ctx, context.allocator);
    if ctx.had_error {
        result.type = .Failed;
        result_text = report_messages(ctx, ctx.messages);
    } else {
        result_text = pretty_print_symbol_table(ctx, context.allocator);
    }
    if output_type & .StdOut {
        result.info_text = result_text;
        result.type = .StdOut;
        return result;
    }
    golden_path := get_golden_path(ctx.file.path, .Check);
    do_golden_comparison(golden_path, result_text, *result, output_type);
    return result;
}
// Lexes and parses first, then runs the check stage test.
run_check_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
    result : Result;
    result.path = file_path;
    result.stage = .Check;  // tag the stage so failure reporting names it (matches run_lexer_test)
    lex(ctx, context.allocator);
    parse(ctx, context.allocator);
    if ctx.had_error {
        result.type = .Failed;
        return result;
    }
    result = run_check_test(ctx, output_type);
    return result;
}
// Builds a Test_Case holding a normalized (forward-slash) copy of `path`.
make_test_case :: (path : string, stage_flags : Stage_Flags, allocator := context.allocator) -> Test_Case {
    normalized := copy_string(path,, allocator);
    replace_chars(normalized, "\\", #char "/");
    test_case : Test_Case;
    test_case.path = normalized;
    test_case.stage_flags = stage_flags;
    return test_case;
}
// Runs each requested stage for one file, reusing the same Compiler_Context so a
// later stage can continue from an earlier stage that passed (or wrote a golden);
// otherwise the stage wrapper re-runs the front end itself.
run_test_new :: (file_path : string, stage_flags : Stage_Flags, results : *[..]Result, output_type : Output_Type = 0, allocator := temp) {
    new_context := context;
    new_context.allocator = allocator;
    push_context new_context {
        ctx : Compiler_Context;
        ctx.file = make_file(*ctx, file_path);
        result : Result;
        if stage_flags & .Lexer {
            result = run_lexer_test(file_path, *ctx, output_type);
            record_result(results, result);
        }
        if stage_flags & .Parser {
            // Parenthesized like the .Check/.Codegen branches below. Previously this read
            // `... && result.type == .Passed || result.type == .Golden_Output`, which
            // (&& binding tighter than ||) could take the continue-from-context path even
            // when the lexer stage had not been run at all.
            if stage_flags & .Lexer && (result.type == .Passed || result.type == .Golden_Output) {
                result = run_parser_test(*ctx, output_type);
            } else {
                result = run_parser_test(file_path, *ctx, output_type);
            }
            record_result(results, result);
        }
        if stage_flags & .Check {
            if stage_flags & .Parser && (result.type == .Passed || result.type == .Golden_Output) {
                result = run_check_test(*ctx, output_type);
            } else {
                result = run_check_test(file_path, *ctx, output_type);
            }
            record_result(results, result);
        }
        if stage_flags & .Codegen {
            if stage_flags & .Check && (result.type == .Passed || result.type == .Golden_Output) {
                result = run_codegen_test(*ctx, output_type);
            } else {
                result = run_codegen_test(file_path, *ctx, output_type);
            }
            record_result(results, result);
        }
        if stage_flags & .Compile {
            // Compile always starts from scratch with its own context.
            result = run_compile_test(file_path, output_type);
            record_result(results, result);
        }
    }
}
// Announces the test, pads the line out to a fixed column so verdicts align,
// then dispatches to the stage runners.
run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_Type = 0, allocator := temp) {
    print("%Running test: %......", cyan(), test_case.path);
    PAD_COLUMN :: 50;
    padding := PAD_COLUMN - test_case.path.count;
    // Inclusive range: prints padding+1 spaces; prints none when the path is long.
    for 0..padding print(" ");
    run_test_new(test_case.path, test_case.stage_flags, results, output_type, allocator);
}
// Appends a stage result to the shared results list (thin wrapper for readability).
record_result :: (results : *[..]Result, result : Result) {
    array_add(results, result);
}
// Runs every test case in a suite, prints per-stage verdicts as they happen,
// collects failures, and prints a summary at the end.
run_test_suite :: (using suite : *Test_Suite, output_type : Output_Type = 0) {
    if suite.name.count > 0 {
        print("%Running suite: %\n", green(), suite.name);
        print("%", reset_color());
    }
    // Path + human-readable failure stage, for the end-of-run failure list.
    Fail_Data :: struct {
        path : string;
        stage : string;
    }
    // Arena that owns all suite-run allocations.
    test_arena : Allocator = make_arena(Gigabytes(1));
    failed_test_paths : [..]Fail_Data;
    failed_test_paths.allocator = test_arena;
    builder : String_Builder;
    init_string_builder(*builder,, test_arena);
    for test_case : test_cases {
        run_test(test_case, *suite.results, output_type, allocator = test_arena);
        // Walk results backwards: one run may have recorded several stage results
        // for this case; stop at the first result belonging to an earlier case.
        for < suite.results {
            result := suite.results[it_index];
            if compare(result.path, test_case.path) == 0 {
                if result.type == {
                    case .Failed; {
                        array_add(*failed_test_paths, .{ result.path, stage_to_string(result.stage) });
                    }
                    case .File_Read_Failed; {
                        array_add(*failed_test_paths, .{ result.path, "file not found" });
                    }
                    case .Golden_File_Not_Found; {
                        array_add(*failed_test_paths, .{ result.path, tprint("golden file not found for %", stage_to_string(result.stage)) });
                    }
                }
                evaluate_result(result);
            } else {
                break;
            }
        }
        // print("\n");
    }
    append(*builder, "\n");
    // NOTE(review): a single test case can contribute several entries to
    // failed_test_paths (one per failed stage), so the passed/failed summary below
    // counts stage results rather than cases — confirm that is intended.
    if output_type == 0 {
        if failed_test_paths.count == 0 {
            green(*builder);
            print_to_builder(*builder, "All % tests passed!\n", test_cases.count);
            reset_color(*builder);
        } else {
            print_to_builder(*builder, "%/% tests passed\n", test_cases.count - failed_test_paths.count, test_cases.count);
            red(*builder);
            print_to_builder(*builder, "% failed\n", failed_test_paths.count);
            for failed_test : failed_test_paths {
                print_to_builder(*builder, "% failed with error: %\n", failed_test.path, failed_test.stage);
            }
            reset_color(*builder);
        }
    }
    print("%\n", builder_to_string(*builder,, test_arena));
}
// Parses a .suite file into `suite`. Each non-comment line is a test-case path
// followed by stage names ("lex", "parse", "check", "codegen", "compile"),
// separated by spaces or tabs. Lines starting with '#' are comments.
// Returns false if the file cannot be read.
read_suite :: (file_path : string, suite : *Test_Suite, allocator := temp) -> bool {
    sc := get_scratch();
    defer scratch_end(sc);
    bytes, ok := read_entire_file(file_path,, sc.allocator);
    if !ok {
        log_error("Unable to read suite file %\n", file_path);
        return false;
    }
    // Suite name = file name without extension.
    path := parse_path(file_path,, sc.allocator);
    file_without_extension := split(path.words[path.words.count - 1], ".",, sc.allocator);
    suite.name = copy_string(file_without_extension[0],, allocator);
    split_lines := split(bytes, "\n",, sc.allocator);
    for split_line : split_lines {
        if split_line.count == 0 {
            // NOTE(review): `break` stops at the FIRST empty line, so anything after a
            // blank separator line is ignored — confirm `continue` was not intended.
            break;
        }
        if split_line[0] == #char "#" {
            continue;
        }
        line := split(split_line, " ",, sc.allocator);
        if line[0].count == 0 {
            continue;
        }
        if line[0].data[0] == #char "#" {
            continue;
        }
        // No spaces found: retry with tab separators before declaring the line invalid.
        if line.count == 1 {
            line = split(split_line, "\t",, sc.allocator);
            if line.count == 1 {
                log_error("Invalid line - % - \n", it_index + 1);
                continue;
            }
        }
        test_case_path := line[0];
        stage_flags : Stage_Flags;
        // Index 0 is the path; comparing it against stage names is harmless.
        for i: 0..line.count - 1 {
            trimmed := trim(line[i]);
            if equal(trimmed, "lex") {
                stage_flags |= .Lexer;
            } else if equal(trimmed, "parse") {
                stage_flags |= .Parser;
            } else if equal(trimmed, "check") {
                stage_flags |= .Check;
            } else if equal(trimmed, "codegen") {
                stage_flags |= .Codegen;
            } else if equal(trimmed, "compile") {
                stage_flags |= .Compile;
            }
        }
        test_case := make_test_case(test_case_path, stage_flags, allocator);
        array_add(*suite.test_cases, test_case);
    }
    return true;
}
// Stub: reading a single standalone test description is not implemented yet.
read_test :: () {
}
// Maps a single stage flag to its human-readable name.
// Zero or combined flag values map to the empty string.
stage_to_string :: (stage : Stage_Flags) -> string {
    if stage == .Lexer   return "lexing";
    if stage == .Parser  return "parsing";
    if stage == .Check   return "checking";
    if stage == .Codegen return "codegen";
    if stage == .Compile return "compiled";
    return "";
}
// Prints a one-line colored verdict for one stage result, plus any attached
// info text, and resets the terminal color afterwards.
evaluate_result :: (result : Result) {
    stage := stage_to_string(result.stage);
    if #complete result.type == {
        case .File_Read_Failed;      print(" %failed with File_Read_Failed\n", red());
        case .Golden_File_Not_Found; print(" %failed with Golden File Not Found for stage %\n", red(), stage);
        case .StdOut;                // nothing to announce; output is in info_text below
        case .Golden_Output;         print(" %output new golden file at %\n", yellow(), result.golden_path);
        case .Passed;                print(" %passed %\n", green(), stage);
        case .Failed;                print(" %failed %\n", red(), stage);
    }
    if result.info_text.count > 0 {
        print("%--- Info text ---\n", cyan());
        print("%", yellow());
        print("%\n", result.info_text);
    }
    print("%", reset_color());
}
// Entry point: parses command-line arguments into suites/tests, then runs them.
// Recognized flags: -output-as-golden, -output, per-test stage flags (-lex, -parse,
// -check, -codegen, -compile), plus .ink files (single tests) and .suite files.
main :: () {
    args := get_command_line_arguments();
    init_context_allocators();
    // Arena backing all suite/test-case storage for the program's lifetime.
    local_temp := make_arena(Megabytes(128));
    suites : [..]Test_Suite;
    suites.allocator = local_temp;
    output_type : Output_Type = 0;
    Argument_Parse_State :: enum {
        None;
        Compile;   // NOTE(review): never assigned anywhere below — dead value?
        Run_Suite;
        Run_Test;
    }
    arg_parse_state : Argument_Parse_State;
    current_suite : *Test_Suite;
    path : string;  // NOTE(review): unused; shadowed by the locals declared below
    for i: 1..args.count - 1 {
        arg := args[i];
        // Output-mode flags are handled here for every parse state.
        if arg == "-output-as-golden" {
            output_type |= .Golden;
            continue;
        } else if arg == "-output" {
            output_type |= .StdOut;
            continue;
        }
        if arg_parse_state == {
            case .Run_Suite; {
                // NOTE(review): these two flag checks are unreachable — the same flags
                // are consumed (with `continue`) above before this switch is entered.
                if arg == "-output-as-golden" {
                    output_type |= .Golden;
                } else if arg == "-output" {
                    output_type |= .StdOut;
                } else {
                    print("%Unknown argument % %\n", red(), arg, reset_color());
                }
            }
            case .Run_Test; {
                // Stage flags apply to the most recently added test case.
                cases := current_suite.test_cases.count;
                if arg == "-lex" {
                    current_suite.test_cases[cases - 1].stage_flags |= .Lexer;
                } else if arg == "-parse" {
                    current_suite.test_cases[cases - 1].stage_flags |= .Parser;
                } else if arg == "-check" {
                    current_suite.test_cases[cases - 1].stage_flags |= .Check;
                } else if arg == "-codegen" {
                    current_suite.test_cases[cases - 1].stage_flags |= .Codegen;
                } else if arg == "-compile" {
                    current_suite.test_cases[cases - 1].stage_flags |= .Compile;
                } else if contains(arg, ".") {
                    sc := get_scratch();
                    defer scratch_end(sc);
                    path_split := split(arg, "\\",, sc.allocator);
                    split_path := split(path_split[path_split.count - 1], ".",, sc.allocator);
                    // NOTE(review): [1] assumes exactly one dot in the file name;
                    // "a.b.ink" would read "b" as the extension — confirm acceptable.
                    extension := split_path[1];
                    if extension == SHADER_EXTENSION {
                        path := copy_string(arg,, local_temp);
                        test_case := make_test_case(path, 0, local_temp);
                        array_add(*current_suite.test_cases, test_case);
                    } else {
                        print("%Invalid file as argument % %\n", red(), arg, reset_color());
                    }
                } else {
                    print("%Unknown argument % %\n", red(), arg, reset_color());
                }
            }
            case .None; {
                if contains(arg, ".") {
                    sc := get_scratch();
                    defer scratch_end(sc);
                    path_split := split(arg, "\\",, sc.allocator);
                    split_path := split(path_split[path_split.count - 1], ".",, sc.allocator);
                    // NOTE(review): same single-dot assumption as above.
                    extension := split_path[1];
                    if extension == SHADER_EXTENSION {
                        // NOTE(review): unreachable — we are inside case .None here.
                        if arg_parse_state == .Run_Suite {
                            log_error("Unable to run a test while already running suite.");
                            continue;
                        }
                        // First test file creates an implicit, unnamed suite.
                        if !current_suite {
                            suite : Test_Suite;
                            suite.results.allocator = local_temp;
                            suite.test_cases.allocator = local_temp;
                            array_add(*suites, suite);
                            current_suite = *suites[0];
                        }
                        arg_parse_state = .Run_Test;
                        path := copy_string(arg,, local_temp);
                        test_case := make_test_case(path, 0, local_temp);
                        array_add(*current_suite.test_cases, test_case);
                    } else if extension == SUITE_EXTENSION {
                        if arg_parse_state == .Run_Test {
                            log_error("Unable to run a suite while already running test.");
                            continue;
                        }
                        arg_parse_state = .Run_Suite;
                        path := copy_string(arg);
                        suite : Test_Suite;
                        suite.results.allocator = local_temp;
                        suite.test_cases.allocator = local_temp;
                        read_suite(path, *suite, local_temp);
                        array_add(*suites, suite);
                        // NOTE(review): always points at suites[0] even when this is a
                        // later suite, and array_add growth can relocate the array,
                        // invalidating this pointer — TODO confirm/fix.
                        current_suite = *suites[0];
                    } else {
                        print("%Invalid file as argument % %\n", red(), arg, reset_color());
                    }
                }
            }
        }
    }
    for suite : suites {
        run_test_suite(*suite, output_type);
    }
    clear(local_temp);
}

View File

@@ -5,18 +5,13 @@ Lexer :: struct {
current_line : int; current_line : int;
current_column : int; current_column : int;
result : Lexing_Result; ctx : *Compiler_Context;
path : string; path : string;
} }
Lexing_Result :: struct {
tokens : [..]Token;
had_error : bool;
messages : [..]Compiler_Message;
}
Token_Kind :: enum { Token_Kind :: enum {
TOKEN_INVALID :: 0;
TOKEN_FLOATLITERAL; TOKEN_FLOATLITERAL;
TOKEN_INTLITERAL; TOKEN_INTLITERAL;
@@ -54,11 +49,13 @@ Token_Kind :: enum {
TOKEN_SEMICOLON; TOKEN_SEMICOLON;
TOKEN_COMMA; TOKEN_COMMA;
TOKEN_DOT; TOKEN_DOT;
TOKEN_DOTDOT;
TOKEN_IDENTIFIER; TOKEN_IDENTIFIER;
// Keywords // Keywords
TOKEN_BOOL; TOKEN_BOOL;
TOKEN_BUFFER;
TOKEN_CASE; TOKEN_CASE;
TOKEN_CBUFFER; TOKEN_CBUFFER;
@@ -95,11 +92,12 @@ Token_Kind :: enum {
TOKEN_OUT; TOKEN_OUT;
TOKEN_PIXEL; TOKEN_PIXEL;
TOKEN_PROPERTIES; TOKEN_PLEX;
TOKEN_RETURN; TOKEN_RETURN;
TOKEN_REGISTER; TOKEN_REGISTER;
TOKEN_STRING;
TOKEN_STRUCT; TOKEN_STRUCT;
TOKEN_SWITCH; TOKEN_SWITCH;
@@ -137,6 +135,8 @@ Token :: struct {
index : int; index : int;
error : string; error : string;
builtin : bool; // @Incomplete: This is kind of a bad idea, but let's just do it for now...
} }
Source_Range :: struct { Source_Range :: struct {
@@ -217,10 +217,11 @@ identifier_kind :: (using lexer : *Lexer) -> Token_Kind {
identifier.count = length; identifier.count = length;
if identifier == "bool" return .TOKEN_BOOL; if identifier == "bool" return .TOKEN_BOOL;
if identifier == "Buffer" return .TOKEN_BUFFER;
if identifier == "case" return .TOKEN_CASE; if identifier == "case" return .TOKEN_CASE;
if identifier == "columnmajor" return .TOKEN_COLUMNMAJOR; if identifier == "columnmajor" return .TOKEN_COLUMNMAJOR;
if identifier == "const" return .TOKEN_CONST; if identifier == "const" return .TOKEN_CONST;
if identifier == "constant_buffer" return .TOKEN_CONSTANT_BUFFER; if identifier == "Constant_Buffer" return .TOKEN_CONSTANT_BUFFER;
if identifier == "continue" return .TOKEN_CONTINUE; if identifier == "continue" return .TOKEN_CONTINUE;
if identifier == "default" return .TOKEN_DEFAULT; if identifier == "default" return .TOKEN_DEFAULT;
if identifier == "directive" return .TOKEN_DIRECTIVE; if identifier == "directive" return .TOKEN_DIRECTIVE;
@@ -244,10 +245,10 @@ identifier_kind :: (using lexer : *Lexer) -> Token_Kind {
if identifier == "optional" return .TOKEN_OPTIONAL; if identifier == "optional" return .TOKEN_OPTIONAL;
if identifier == "out" return .TOKEN_OUT; if identifier == "out" return .TOKEN_OUT;
if identifier == "pixel" return .TOKEN_PIXEL; if identifier == "pixel" return .TOKEN_PIXEL;
if identifier == "properties" return .TOKEN_PROPERTIES;
if identifier == "return" return .TOKEN_RETURN; if identifier == "return" return .TOKEN_RETURN;
if identifier == "register" return .TOKEN_REGISTER; if identifier == "register" return .TOKEN_REGISTER;
if identifier == "struct" return .TOKEN_STRUCT; if identifier == "struct" return .TOKEN_STRUCT;
if identifier == "plex" return .TOKEN_STRUCT;
if identifier == "switch" return .TOKEN_SWITCH; if identifier == "switch" return .TOKEN_SWITCH;
if identifier == "true" return .TOKEN_TRUE; if identifier == "true" return .TOKEN_TRUE;
if identifier == "unorm" return .TOKEN_UNORM; if identifier == "unorm" return .TOKEN_UNORM;
@@ -264,12 +265,32 @@ identifier_kind :: (using lexer : *Lexer) -> Token_Kind {
error_token :: (lexer : *Lexer, message : string) -> *Token { error_token :: (lexer : *Lexer, message : string) -> *Token {
token : *Token = new_token(lexer, .TOKEN_ERROR); token : *Token = new_token(lexer, .TOKEN_ERROR);
lexer.result.had_error = true; lexer.ctx.had_error = true;
token.error = copy_string(message); token.error = copy_string(message);
return token; return token;
} }
// unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {
// builder : String_Builder;
// init_string_builder(*builder,, temp);
// print_to_builder(*builder, "Unable to open file '%' for reading\n\n", path);
// location := generate_source_location_from_token(state, token);
// indent(*builder, 1);
// cyan(*builder);
// print_to_builder(*builder, "%\n", print_from_source_location(location));
// indent(*builder, 1);
// loc := location.begin;
// print_token_pointer(*builder, loc);
// final_message := builder_to_string(*builder);
// record_error(state, token, final_message, false);
// }
record_error :: (lexer : *Lexer, message : string) { record_error :: (lexer : *Lexer, message : string) {
error : Compiler_Message; error : Compiler_Message;
error.message_kind = .Error; error.message_kind = .Error;
@@ -291,8 +312,8 @@ record_error :: (lexer : *Lexer, message : string) {
array_add(*error.source_locations, source_location); array_add(*error.source_locations, source_location);
lexer.result.had_error = true; lexer.ctx.had_error = true;
array_add(*lexer.result.messages, error); array_add(*lexer.ctx.messages, error);
} }
make_int :: (lexer : *Lexer) -> *Token { make_int :: (lexer : *Lexer) -> *Token {
@@ -322,10 +343,6 @@ make_float :: (lexer : *Lexer) -> *Token {
return token; return token;
} }
make_string :: () {
}
new_token :: (lexer : *Lexer, kind : Token_Kind) -> *Token { new_token :: (lexer : *Lexer, kind : Token_Kind) -> *Token {
length := lexer.cursor - lexer.start; length := lexer.cursor - lexer.start;
token : Token; token : Token;
@@ -342,13 +359,54 @@ new_token :: (lexer : *Lexer, kind : Token_Kind) -> *Token {
} }
lexer.current_column += length; lexer.current_column += length;
array_add(*lexer.result.tokens, token); array_add(*lexer.ctx.tokens, token);
return *lexer.result.tokens[lexer.result.tokens.count - 1]; return *lexer.ctx.tokens[lexer.ctx.tokens.count - 1];
} }
make_directive :: (lexer : *Lexer) -> *Token { make_directive :: (lexer : *Lexer) -> *Token {
lexer.start += 1; lexer.start += 1;
return make_identifier(lexer, .TOKEN_DIRECTIVE); ident := make_identifier(lexer, .TOKEN_DIRECTIVE);
if ident.ident_value == "load" {
path_tok := scan_next_token(lexer);
path := path_tok.string_value;
ctx : Compiler_Context;
ctx.environment = lexer.ctx.environment;
ctx.file = make_file(*ctx, path);
if ctx.file.source.count == 0 {
// unable_to_open_file(lexer, path, path_tok);
record_error(lexer, tprint("Unable to open file '%' for reading\n", path));
return error_token(lexer, tprint("Unable to open file '%' for reading\n", path));
}
lex(*ctx);
ctx.tokens.count -= 1; // @Note: remove TOKEN_EOF
lexer.ctx.tokens.count -= 2;
array_resize(*lexer.ctx.tokens, lexer.ctx.tokens.count + ctx.tokens.count);
for tok : ctx.tokens {
lexer.ctx.tokens[it_index] = tok;
}
return scan_next_token(lexer);
} else if ident.ident_value == "add_define" {
new_define := scan_next_token(lexer);
add_define(*lexer.ctx.environment, new_define.ident_value);
lexer.ctx.tokens.count -= 2;
return scan_next_token(lexer);
}
return ident;
}
make_string :: (lexer : *Lexer) -> *Token {
token : *Token = new_token(lexer, .TOKEN_STRING);
name : string = .{ count = token.length - 2,
data = *lexer.input.data[lexer.start + 1] };
token.string_value = name;
return token;
} }
make_identifier :: (lexer : *Lexer, kind : Token_Kind) -> *Token { make_identifier :: (lexer : *Lexer, kind : Token_Kind) -> *Token {
@@ -367,6 +425,7 @@ make_token :: (lexer : *Lexer, token_kind : Token_Kind) -> *Token {
skip_whitespace :: (lexer : *Lexer) { skip_whitespace :: (lexer : *Lexer) {
while true { while true {
if is_at_end(lexer) return;
c := peek_char(lexer); c := peek_char(lexer);
if c == { if c == {
@@ -421,6 +480,17 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
if is_digit(c) return number(lexer); if is_digit(c) return number(lexer);
if c == { if c == {
case #char "\""; {
c = advance(lexer);
// lexer.start = lexer.cursor;
while c != #char "\"" {
c = advance(lexer);
}
// lexer.cursor -= 1;
tok := make_string(lexer);
// advance(lexer);
return tok;
}
case #char "+"; { case #char "+"; {
if match_character(lexer, #char "=") return make_token(lexer, .TOKEN_PLUSEQUALS); if match_character(lexer, #char "=") return make_token(lexer, .TOKEN_PLUSEQUALS);
return make_token(lexer, .TOKEN_PLUS); return make_token(lexer, .TOKEN_PLUS);
@@ -490,7 +560,10 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
} }
case #char ";"; return make_token(lexer, .TOKEN_SEMICOLON); case #char ";"; return make_token(lexer, .TOKEN_SEMICOLON);
case #char ","; return make_token(lexer, .TOKEN_COMMA); case #char ","; return make_token(lexer, .TOKEN_COMMA);
case #char "."; return make_token(lexer, .TOKEN_DOT); case #char "."; {
if match_character(lexer, #char ".") return make_token(lexer, .TOKEN_DOTDOT);
return make_token(lexer, .TOKEN_DOT);
}
} }
s : string = .{ count = 1, data = *c }; s : string = .{ count = 1, data = *c };
@@ -499,45 +572,35 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
// return error_token(lexer, tprint("Invalid token: %", s)); // return error_token(lexer, tprint("Invalid token: %", s));
} }
lex :: (ctx : *Compiler_Context, allocator := temp) {
if ctx.had_error {
lex :: (result : *Compile_Result) {
if result.had_error {
return; return;
} }
for *file : result.files { new_context := context;
new_context.allocator = allocator;
push_context new_context {
init_context_allocators();
defer clear_context_allocators();
lexer : Lexer; lexer : Lexer;
init_lexer_from_string(*lexer, file.file.source); lexer.ctx = ctx;
lexer.path = file.file.path; array_reserve(*lexer.ctx.tokens, 1024);
init_lexer_from_string(*lexer, ctx.file.source);
lexer.path = ctx.file.path;
token : *Token = scan_next_token(*lexer); token : *Token = scan_next_token(*lexer);
while token && token.kind != .TOKEN_EOF { while token && token.kind != .TOKEN_EOF {
token = scan_next_token(*lexer); token = scan_next_token(*lexer);
} }
array_copy(*file.tokens.tokens, lexer.result.tokens);
result.had_error |= lexer.result.had_error;
// @Incomplete(nb): Temporary until we figure out a good way of passing this stuff around
copy_messages(lexer.result.messages, *result.messages);
} }
} }
lex :: (lexer : *Lexer, allocator : Allocator = context.allocator) -> Lexing_Result {
lexer.result.tokens.allocator = allocator;
token : *Token = scan_next_token(lexer);
while token && token.kind != .TOKEN_EOF {
token = scan_next_token(lexer);
}
return lexer.result;
}
init_lexer_from_string :: (lexer : *Lexer, input : string) { init_lexer_from_string :: (lexer : *Lexer, input : string) {
ok := read_input_from_string(lexer, input); ok := read_input_from_string(lexer, input);
if !ok { if !ok {
record_error(lexer, "Unable to initialize from string\n"); record_error(lexer, "Unable to initialize from string\n");
lexer.result.had_error = true; lexer.ctx.had_error = true;
} }
} }
@@ -545,7 +608,7 @@ init_lexer_from_file :: (lexer : *Lexer, file_path : string) {
ok := read_input_from_file(lexer, file_path); ok := read_input_from_file(lexer, file_path);
if !ok { if !ok {
record_error(lexer, tprint("Unable to read file: %\n", file_path)); record_error(lexer, tprint("Unable to read file: %\n", file_path));
lexer.result.had_error = true; lexer.ctx.had_error = true;
} }
} }
@@ -684,10 +747,20 @@ print_token_pointer :: (builder : *String_Builder, token : Token) {
} }
} }
print_from_source_location :: (builder : *String_Builder, source_location : Source_Range, indentation : int = 0) { print_from_source_location :: (ctx : *Compiler_Context, builder : *String_Builder, source_location : Source_Range, indentation : int = 0) {
current := source_location.begin; current := source_location.begin;
begin := source_location.begin; begin := source_location.begin;
end := source_location.end; end := source_location.end;
if begin.builtin {
for i : begin.index..end.index - 1 {
tok := ctx.tokens[i];
text : string;
text.data = tok.source;
text.count = tok.length;
print_to_builder(builder, "%", text);
}
} else {
begin_pos := 0; begin_pos := 0;
token_string : string; token_string : string;
count := end.index - begin.index + end.length; count := end.index - begin.index + end.length;
@@ -713,11 +786,14 @@ print_from_source_location :: (builder : *String_Builder, source_location : Sour
print_to_builder(builder, "%", token_string); print_to_builder(builder, "%", token_string);
} }
} }
}
print_from_source_location :: (source_location : Source_Range, allocator := context.allocator, indentation : int = 0) -> string { print_from_source_location :: (ctx : *Compiler_Context, source_location : Source_Range, allocator := context.allocator, indentation : int = 0) -> string {
sc := get_scratch();
defer scratch_end(sc);
builder : String_Builder; builder : String_Builder;
init_string_builder(*builder,, allocator); init_string_builder(*builder,, sc.allocator);
print_from_source_location(*builder, source_location); print_from_source_location(ctx, *builder, source_location,, sc.allocator);
return builder_to_string(*builder,, allocator); return builder_to_string(*builder,, allocator);
} }

View File

@@ -1,49 +1,12 @@
#import "Flat_Pool";
/**
* TODO:
* if parsing
* for/while loop parsing
**/
//////////////////////////// ////////////////////////////
//@nb - Parse_state state //@nb - Parse_state state
Parse_State :: struct { Parse_State :: struct {
current : *Token; current : *Token;
previous : *Token; previous : *Token;
tokens : [..]Token;
current_token_index : int; current_token_index : int;
node_allocator : Allocator; ctx : *Compiler_Context;
node_arena : Arena;
child_allocator : Allocator;
child_arena : Arena;
// had_error : bool;
path : string;
result : Parse_Result;
}
////////////////////////////
//@nb - Result and error handling
Parse_Result :: struct {
root : *AST_Node;
nodes : [..]AST_Node;
had_error : bool;
messages : [..]Compiler_Message;
}
Parse_Error_Kind :: enum {
Parse_Error_Type_Missing;
Parse_Error_Expected_Expression;
Parse_Error_Empty_Block;
Parse_Error_Unexpected_Token;
} }
//////////////////////////// ////////////////////////////
@@ -93,7 +56,7 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE}; rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE}; rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL}; rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL};
rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL}; // rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL};
rules[Token_Kind.TOKEN_MINUS] = .{unary, binary, .PREC_TERM}; rules[Token_Kind.TOKEN_MINUS] = .{unary, binary, .PREC_TERM};
rules[Token_Kind.TOKEN_PLUS] = .{null, binary, .PREC_TERM}; rules[Token_Kind.TOKEN_PLUS] = .{null, binary, .PREC_TERM};
rules[Token_Kind.TOKEN_SEMICOLON] = .{null, null, .PREC_NONE}; rules[Token_Kind.TOKEN_SEMICOLON] = .{null, null, .PREC_NONE};
@@ -129,16 +92,6 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
return rules; return rules;
} }
init_parse_state :: (parse_state : *Parse_State, tokens : [..]Token, path : string) {
parse_state.tokens = tokens;
parse_state.path = path;
parse_state.node_allocator = make_arena(*parse_state.node_arena);
parse_state.child_allocator = make_arena(*parse_state.child_arena);
parse_state.result.nodes.allocator = parse_state.node_allocator;
array_reserve(*parse_state.result.nodes, 4096);
parse_state.current_token_index = 0;
}
//////////////////////////// ////////////////////////////
//@nb - Error handling functions //@nb - Error handling functions
@@ -147,7 +100,7 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
error : Compiler_Message; error : Compiler_Message;
error.message_kind = .Error; error.message_kind = .Error;
error.message = message; error.message = message;
error.path = parse_state.path; error.path = parse_state.ctx.file.path;
source_location : Source_Range; source_location : Source_Range;
source_location.begin = token; source_location.begin = token;
@@ -155,14 +108,17 @@ record_error :: (parse_state : *Parse_State, token : Token, message : string, re
source_location.begin.source = source_location.begin.source - source_location.begin.column; source_location.begin.source = source_location.begin.source - source_location.begin.column;
source_location.main_token = token; source_location.main_token = token;
snap := snapshot_state(parse_state);
advance_to_sync_point(parse_state); advance_to_sync_point(parse_state);
error.report_source_location = report_source_location; error.report_source_location = report_source_location;
source_location.end = parse_state.current; source_location.end = parse_state.current;
array_add(*error.source_locations, source_location); array_add(*error.source_locations, source_location);
parse_state.result.had_error = true; parse_state.ctx.had_error = true;
array_add(*parse_state.result.messages, error); array_add(*parse_state.ctx.messages, error);
rewind_to_snapshot(parse_state, snap);
} }
generate_source_location_from_token :: (state : *Parse_State, token : Token) -> Source_Range { generate_source_location_from_token :: (state : *Parse_State, token : Token) -> Source_Range {
@@ -190,8 +146,10 @@ unexpected_token :: (state : *Parse_State, token : Token, message : string) {
/* /*
*/ */
sc := get_scratch();
defer scratch_end(sc);
builder : String_Builder; builder : String_Builder;
init_string_builder(*builder,, temp); init_string_builder(*builder,, sc.allocator);
print_to_builder(*builder, "%\n\n", message); print_to_builder(*builder, "%\n\n", message);
@@ -209,12 +167,12 @@ unexpected_token :: (state : *Parse_State, token : Token, message : string) {
indent(*builder, 1); indent(*builder, 1);
cyan(*builder); cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(location)); print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1); indent(*builder, 1);
print_token_pointer(*builder, token); print_token_pointer(*builder, token);
final_message := builder_to_string(*builder); final_message := builder_to_string(*builder,, context.allocator);
record_error(state, token, final_message, false); record_error(state, token, final_message, false);
} }
@@ -230,7 +188,7 @@ else_if_without_if :: (state : *Parse_State) {
indent(*builder, 1); indent(*builder, 1);
cyan(*builder); cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(location)); print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1); indent(*builder, 1);
print_token_pointer(*builder, token); print_token_pointer(*builder, token);
@@ -252,7 +210,7 @@ else_without_if :: (state : *Parse_State) {
indent(*builder, 1); indent(*builder, 1);
cyan(*builder); cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(location)); print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1); indent(*builder, 1);
print_token_pointer(*builder, token); print_token_pointer(*builder, token);
@@ -262,6 +220,26 @@ else_without_if :: (state : *Parse_State) {
record_error(state, token, final_message, false); record_error(state, token, final_message, false);
} }
unable_to_parse_statement :: (state : *Parse_State, token : Token, message : string = "") {
builder : String_Builder;
init_string_builder(*builder,, temp);
print_to_builder(*builder, "Unable to parse statement here. %\n", message);
location : Source_Range = generate_source_location_from_token(state, token);
indent(*builder, 1);
cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1);
print_token_pointer(*builder, token);
final_message := builder_to_string(*builder);
record_error(state, token, final_message, false);
}
expected_expression :: (state : *Parse_State, token : Token, message : string) { expected_expression :: (state : *Parse_State, token : Token, message : string) {
builder : String_Builder; builder : String_Builder;
init_string_builder(*builder,, temp); init_string_builder(*builder,, temp);
@@ -272,7 +250,7 @@ expected_expression :: (state : *Parse_State, token : Token, message : string) {
indent(*builder, 1); indent(*builder, 1);
cyan(*builder); cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(location)); print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1); indent(*builder, 1);
print_token_pointer(*builder, token); print_token_pointer(*builder, token);
@@ -291,7 +269,7 @@ missing_type_specifier :: (state : *Parse_State, token : Token, message : string
indent(*builder, 1); indent(*builder, 1);
cyan(*builder); cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(location)); print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1); indent(*builder, 1);
loc := location.begin; loc := location.begin;
@@ -315,7 +293,7 @@ empty_block :: (state : *Parse_State, token : Token, message : string) {
indent(*builder, 1); indent(*builder, 1);
cyan(*builder); cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(location)); print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1); indent(*builder, 1);
loc := location.begin; loc := location.begin;
@@ -329,6 +307,45 @@ empty_block :: (state : *Parse_State, token : Token, message : string) {
record_error(state, token, final_message, false); record_error(state, token, final_message, false);
} }
unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {
builder : String_Builder;
init_string_builder(*builder,, temp);
print_to_builder(*builder, "Unable to open file '%' for reading\n\n", path);
location := generate_source_location_from_token(state, token);
indent(*builder, 1);
cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1);
loc := location.begin;
print_token_pointer(*builder, loc);
final_message := builder_to_string(*builder);
record_error(state, token, final_message, false);
}
entry_point_requires_return_value :: (state : *Parse_State, token : Token) {
builder : String_Builder;
init_string_builder(*builder,, temp);
print_to_builder(*builder, "Entry point '%' requires return value\n\n", token.ident_value);
location := generate_source_location_from_token(state, token);
indent(*builder, 1);
cyan(*builder);
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
indent(*builder, 1);
loc := location.begin;
print_token_pointer(*builder, loc);
final_message := builder_to_string(*builder);
record_error(state, token, final_message, false);
}
error_node :: (parse_state : *Parse_State, message : string) -> *AST_Node { error_node :: (parse_state : *Parse_State, message : string) -> *AST_Node {
node := make_node(parse_state, .Error); node := make_node(parse_state, .Error);
node.name = copy_string(message); node.name = copy_string(message);
@@ -353,14 +370,154 @@ advance_to_sync_point :: (parse_state : *Parse_State) {
//////////////////////////// ////////////////////////////
//@nb - Base parsing functions //@nb - Base parsing functions
make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node { make_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
node : AST_Node; node : AST_Node;
node.kind = kind; node.kind = kind;
node.children.allocator = parse_state.child_allocator; array_add(nodes, node);
array_add(*parse_state.result.nodes, node);
return *parse_state.result.nodes[parse_state.result.nodes.count - 1]; return *(nodes.*[nodes.count - 1]);
}
make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
return make_node(*parse_state.ctx.nodes, kind);
}
make_builtin_token :: (tokens : *[..]Token, kind : Token_Kind, text : string, col : *int, line : *int) -> *Token {
tok : Token;
tok.kind = kind;
start := 0;
tok.column = col.*;
for c : text {
if c == #char "\n" {
line.* ++ 1;
col.* = 0;
} else {
col.* += 1;
}
}
tok.index = tokens.count;
tok.length = text.count;
tok.builtin = true;
tok.source = text.data;
tok.ident_value = text;
array_add(tokens, tok);
return *(tokens.*)[tokens.count - 1];
}
new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg) -> *AST_Node {
sc := get_scratch(context.allocator);
defer scratch_end(sc);
node := make_node(*ctx.nodes, .Struct);
source_location : Source_Range;
col := 0;
line := 0;
tok_index := ctx.tokens.count;
ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
ident_token.ident_value = name;
source_location.begin = ident_token;
make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
make_builtin_token(*ctx.tokens, .TOKEN_STRUCT, "struct ", *col, *line);
make_builtin_token(*ctx.tokens, .TOKEN_LEFTBRACE, "{\n\t", *col, *line);
line += 1;
col = 0;
field_list := make_node(*ctx.nodes, .FieldList);
add_child(node, field_list);
for member : members {
field := make_node(*ctx.nodes, .Field);
field_source_loc : Source_Range;
field_ident := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.name, *col, *line);
field_source_loc.begin = field_ident;
field.token = field_ident;
field.name = member.name;
make_builtin_token(*ctx.tokens, .TOKEN_COLON, ": ", *col, *line);
make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);
col = 0;
line += 1;
field_source_loc.end = semicolon_tok;
field.source_location = field_source_loc;
add_child(field_list, field);
}
brace_token := make_builtin_token(*ctx.tokens, .TOKEN_RIGHTBRACE, "\n}", *col, *line);
source_location.end = brace_token;
node.source_location = source_location;
return node;
}
new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg) -> *AST_Node {
sc := get_scratch(context.allocator);
defer scratch_end(sc);
node := make_node(*ctx.nodes, .Function);
source_location : Source_Range;
col := 0;
line := 0;
tok_index := ctx.tokens.count;
ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
source_location.begin = ident_token;
make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
make_builtin_token(*ctx.tokens, .TOKEN_LEFTPAREN, "(", *col, *line);
field_list := make_node(*ctx.nodes, .FieldList);
add_child(node, field_list);
for member : members {
field := make_node(*ctx.nodes, .Field);
field_source_loc : Source_Range;
type_tok := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
field_source_loc.begin = type_tok;
field.token = type_tok;
if it_index < members.count - 1 {
make_builtin_token(*ctx.tokens, .TOKEN_COMMA, ", ", *col, *line);
}
field_source_loc.end = type_tok;
field.source_location = field_source_loc;
add_child(field_list, field);
}
make_builtin_token(*ctx.tokens, .TOKEN_RIGHTPAREN, ")", *col, *line);
semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);
source_location.end = semicolon_tok;
node.source_location = source_location;
return node;
}
get_field_list :: (struct_or_func : *AST_Node) -> *AST_Node {
assert(struct_or_func.kind == .Function || struct_or_func.kind == .Struct || struct_or_func.kind == .CBuffer);
return struct_or_func.children[0];
} }
add_child :: (node : *AST_Node, child : *AST_Node) { add_child :: (node : *AST_Node, child : *AST_Node) {
@@ -393,10 +550,10 @@ advance :: (parse_state : *Parse_State) {
parse_state.previous = parse_state.current; parse_state.previous = parse_state.current;
while true { while true {
if parse_state.current_token_index >= parse_state.tokens.count { if parse_state.current_token_index >= parse_state.ctx.tokens.count {
break; break;
} }
parse_state.current = *parse_state.tokens[parse_state.current_token_index]; parse_state.current = *parse_state.ctx.tokens[parse_state.current_token_index];
parse_state.current_token_index += 1; parse_state.current_token_index += 1;
if parse_state.current.kind != .TOKEN_ERROR break; if parse_state.current.kind != .TOKEN_ERROR break;
@@ -429,7 +586,7 @@ check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {
//nb - Checks if the next token is of a certain kind //nb - Checks if the next token is of a certain kind
check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool { check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
return parse_state.tokens[parse_state.current_token_index].kind == kind; return parse_state.ctx.tokens[parse_state.current_token_index].kind == kind;
} }
//nb - Consume a token if //nb - Consume a token if
@@ -440,9 +597,13 @@ consume :: (parse_state : *Parse_State, kind : Token_Kind, message : string) {
} }
token := parse_state.previous; token := parse_state.previous;
advance(parse_state); advance_to_sync_point(parse_state);
unexpected_token(parse_state, token, message); unexpected_token(parse_state, token, message);
consume(parse_state, kind, message);
if parse_state.current.kind == .TOKEN_EOF {
return;
}
} }
//////////////////////////// ////////////////////////////
@@ -451,16 +612,21 @@ get_rule :: (kind : Token_Kind) -> *Parse_Rule {
return *parse_rules[kind]; return *parse_rules[kind];
} }
precedence :: (parse_state : *Parse_State, precedence : Precedence) -> *AST_Node { precedence :: (parse_state : *Parse_State, precedence : Precedence, message : string = "") -> *AST_Node {
prev := parse_state.previous;
advance(parse_state); advance(parse_state);
prefix_rule := get_rule(parse_state.previous.kind).prefix; prefix_rule := get_rule(parse_state.previous.kind).prefix;
if prefix_rule == null { if prefix_rule == null {
tok_s : string; tok_s : string;
tok_s.data = parse_state.previous.source; tok_s.data = prev.source;
tok_s.count = parse_state.previous.length; tok_s.count = prev.length;
expected_expression(parse_state, parse_state.previous, tprint("Expected expression after '%'.", tok_s)); if message {
// @Incomplete: Add error node here? expected_expression(parse_state, prev, tprint("Expected expression after '%'. %", tok_s, message));
} else {
expected_expression(parse_state, prev, tprint("Expected expression after '%'.", tok_s));
}
return error_node(parse_state, "Expected expression."); return error_node(parse_state, "Expected expression.");
} }
@@ -473,7 +639,6 @@ precedence :: (parse_state : *Parse_State, precedence : Precedence) -> *AST_Node
tok_s.data = parse_state.previous.source; tok_s.data = parse_state.previous.source;
tok_s.count = parse_state.previous.length; tok_s.count = parse_state.previous.length;
expected_expression(parse_state, parse_state.current, tprint("Reached end of file. Expected expression after '%'.", tok_s)); expected_expression(parse_state, parse_state.current, tprint("Reached end of file. Expected expression after '%'.", tok_s));
// expected_expression(parse_state, parse_state.current, "Reached end of file. Expected expression.");
// @Incomplete: Add error node here? // @Incomplete: Add error node here?
return null; return null;
} }
@@ -540,16 +705,15 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
} }
array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node { array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
identifier := parse_state.tokens[parse_state.current_token_index - 3]; identifier := parse_state.ctx.tokens[parse_state.current_token_index - 3];
left_bracket := parse_state.tokens[parse_state.current_token_index - 2]; left_bracket := parse_state.ctx.tokens[parse_state.current_token_index - 2];
array_access := make_node(parse_state, .Unary); array_access := make_node(parse_state, .Binary);
array_access.token = left_bracket; array_access.token = left_bracket;
array_index := expression(parse_state); array_index := expression(parse_state);
add_child(array_access, left);
add_child(array_access, array_index); add_child(array_access, array_index);
add_child(left, array_access);
consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index."); consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index.");
source_location : Source_Range; source_location : Source_Range;
@@ -566,8 +730,8 @@ array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
} }
source_location.end = parse_state.previous; source_location.end = parse_state.previous;
left.source_location = source_location; array_access.source_location = source_location;
return left; return array_access;
} }
unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node { unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
@@ -606,6 +770,35 @@ directive :: (state : *Parse_State) -> *AST_Node {
func := function_declaration(state, identifier_token, .None, false, false); func := function_declaration(state, identifier_token, .None, false, false);
func.foreign_declaration = true; func.foreign_declaration = true;
return func; return func;
} else if state.current.ident_value == "if" {
if_directive := make_node(state, .If_Directive);
source_location : Source_Range;
if state.previous {
source_location.begin = state.previous;
} else {
source_location.begin = state.current;
}
advance(state);
cond := expression(state);
add_child(if_directive, cond);
source_location.end = state.previous;
advance_to_sync_point(state);
if_body := block(state);
add_child(if_directive, if_body);
if match(state, .TOKEN_ELSE) {
else_node := else_statement(state);
add_child(if_directive, else_node);
}
if_directive.source_location = source_location;
return if_directive;
} }
return null; return null;
@@ -643,34 +836,31 @@ dot :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
source_location : Source_Range; source_location : Source_Range;
source_location.begin = left.source_location.begin; source_location.begin = left.source_location.begin;
source_location.main_token = identifier;
access := make_node(parse_state, .Access);
variable := make_node(parse_state, .Variable);
variable.name = identifier.ident_value;
add_child(access, left);
add_child(access, variable);
if check_any(parse_state, .TOKEN_ASSIGN, .TOKEN_MINUSEQUALS, .TOKEN_PLUSEQUALS, .TOKEN_DIVEQUALS, .TOKEN_MODEQUALS, .TOKEN_TIMESEQUALS) { if check_any(parse_state, .TOKEN_ASSIGN, .TOKEN_MINUSEQUALS, .TOKEN_PLUSEQUALS, .TOKEN_DIVEQUALS, .TOKEN_MODEQUALS, .TOKEN_TIMESEQUALS) {
advance(parse_state); advance(parse_state);
variable := make_node(parse_state, .Variable); access.source_location = generate_source_location_from_token(parse_state, identifier);
variable.source_location = generate_source_location_from_token(parse_state, identifier);
variable.name = identifier.ident_value;
add_child(left, variable);
node := make_node(parse_state, .Binary); node := make_node(parse_state, .Binary);
node.token = parse_state.previous; node.token = parse_state.previous;
add_child(node, left); node.source_location = generate_source_location_from_token(parse_state, node.token);
add_child(node, access);
add_child(node, expression(parse_state)); add_child(node, expression(parse_state));
return node; return node;
} }
variable := make_node(parse_state, .Variable);
variable.name = identifier.ident_value;
if check(parse_state, .TOKEN_DOT) { source_location.end = parse_state.current;
advance(parse_state); access.source_location = source_location;
dot(parse_state, variable); return access;
}
add_child(left, variable);
source_location.end = parse_state.previous;
variable.source_location = source_location;
return left;
} }
integer :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node { integer :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
@@ -693,8 +883,8 @@ floating :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
return node; return node;
} }
expression :: (parse_state : *Parse_State) -> *AST_Node { expression :: (parse_state : *Parse_State, message : string = "") -> *AST_Node {
expression := precedence(parse_state, .PREC_ASSIGNMENT); expression := precedence(parse_state, .PREC_ASSIGNMENT, message);
return expression; return expression;
} }
@@ -749,7 +939,7 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
node.array_field = true; node.array_field = true;
} else { } else {
if !check(parse_state, .TOKEN_ASSIGN) { if !check(parse_state, .TOKEN_ASSIGN) {
internal_error_message(*parse_state.result.messages, "Unimplemented error message.", parse_state.path); internal_error_message(*parse_state.ctx.messages, "Unimplemented error message.", parse_state.ctx.file.path);
return node; return node;
} }
// missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name."); // missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");
@@ -771,7 +961,6 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
advance(parse_state); advance(parse_state);
} }
} }
} else if match(parse_state, .TOKEN_ASSIGN) { } else if match(parse_state, .TOKEN_ASSIGN) {
add_child(node, expression(parse_state)); add_child(node, expression(parse_state));
} }
@@ -794,8 +983,8 @@ argument_list :: (parse_state : *Parse_State) -> *AST_Node {
source_location.main_token = parse_state.current; source_location.main_token = parse_state.current;
error_before := parse_state.result.had_error; error_before := parse_state.ctx.had_error;
parse_state.result.had_error = false; parse_state.ctx.had_error = false;
while !check(parse_state, .TOKEN_RIGHTPAREN) { while !check(parse_state, .TOKEN_RIGHTPAREN) {
arg := expression(parse_state); arg := expression(parse_state);
@@ -808,12 +997,12 @@ argument_list :: (parse_state : *Parse_State) -> *AST_Node {
if check(parse_state, .TOKEN_RIGHTPAREN) break; if check(parse_state, .TOKEN_RIGHTPAREN) break;
consume(parse_state, .TOKEN_COMMA, "Expect ',' after function argument."); consume(parse_state, .TOKEN_COMMA, "Expect ',' after function argument.");
if parse_state.result.had_error { if parse_state.ctx.had_error {
break; break;
} }
} }
parse_state.result.had_error = error_before || parse_state.result.had_error; parse_state.ctx.had_error = error_before || parse_state.ctx.had_error;
consume(parse_state, .TOKEN_RIGHTPAREN, "Expect ')' after function call."); consume(parse_state, .TOKEN_RIGHTPAREN, "Expect ')' after function call.");
@@ -873,9 +1062,10 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
source_location : Source_Range; source_location : Source_Range;
source_location.begin = parse_state.previous; source_location.begin = parse_state.previous;
if_cond := expression(parse_state); if_cond := expression(parse_state, "Expected if condition.");
add_child(node, if_cond); add_child(node, if_cond);
source_location.end = parse_state.previous; source_location.end = parse_state.previous;
advance_to_sync_point(parse_state);
if_body := block(parse_state); if_body := block(parse_state);
add_child(node, if_body); add_child(node, if_body);
@@ -905,6 +1095,59 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
return error_node(parse_state, "'else' without 'if'."); return error_node(parse_state, "'else' without 'if'.");
} }
} else if match(parse_state, .TOKEN_FOR) {
if check(parse_state, .TOKEN_IDENTIFIER) {
node := make_node(parse_state, .For);
source_location : Source_Range;
source_location.begin = parse_state.previous;
loop_iterator := parse_state.current;
node.token = loop_iterator;
advance(parse_state);
consume(parse_state, .TOKEN_COLON, "Expect ':' after for loop iterator.");
snap := snapshot_state(parse_state);
begin_iter := expression(parse_state, "Expected beginning of iterator.");
if begin_iter.kind == .Error {
unable_to_parse_statement(parse_state, source_location.begin);
rewind_to_snapshot(parse_state, snap);
if parse_state.current.kind == .TOKEN_LEFTBRACE {
block(parse_state);
}
return error_node(parse_state, "'for' without well-formed iterator expression.");
}
add_child(node, begin_iter);
consume(parse_state, .TOKEN_DOTDOT, "Expect '..' after for loop iter left hand side.");
snap = snapshot_state(parse_state);
end_iter := expression(parse_state, "Expected end of iterator.");
if end_iter.kind == .Error {
unable_to_parse_statement(parse_state, source_location.begin);
rewind_to_snapshot(parse_state, snap);
if parse_state.current.kind == .TOKEN_LEFTBRACE {
block(parse_state);
}
return error_node(parse_state, "'for' without well-formed iterator expression.");
}
add_child(node, end_iter);
if check(parse_state, .TOKEN_LEFTBRACE) {
for_body := block(parse_state);
add_child(node, for_body);
} else {
unable_to_parse_statement(parse_state, source_location.begin, "'for' currently expects a brace-enclosed block as a body.");
return error_node(parse_state, "'for' currently expects a brace-enclosed block as a body.");
}
node.source_location = source_location;
return node;
}
} else { } else {
return expression_statement(parse_state); return expression_statement(parse_state);
} }
@@ -915,13 +1158,15 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
else_statement :: (parse_state : *Parse_State) -> *AST_Node { else_statement :: (parse_state : *Parse_State) -> *AST_Node {
if check(parse_state, .TOKEN_IF) { if check(parse_state, .TOKEN_IF) {
return statement(parse_state); return statement(parse_state);
} else if check(parse_state, .TOKEN_DIRECTIVE) && parse_state.current.ident_value == "if" {
return directive(parse_state);
} }
return block(parse_state); return block(parse_state);
} }
block :: (parse_state : *Parse_State) -> *AST_Node { block :: (parse_state : *Parse_State) -> *AST_Node {
node : *AST_Node = make_node(parse_state, .Block); node : *AST_Node = make_node(parse_state, .Block);
array_reserve(*node.children, 1024); array_reserve(*node.children, 32);
source_location : Source_Range; source_location : Source_Range;
@@ -1020,10 +1265,22 @@ function_declaration :: (parse_state : *Parse_State, identifier_token : *Token,
case .Vertex; { case .Vertex; {
node.vertex_entry_point = true; node.vertex_entry_point = true;
name = sprint("vs_%", function_name_token.ident_value); name = sprint("vs_%", function_name_token.ident_value);
// if return_type_token.kind == .TOKEN_INVALID {
// entry_point_requires_return_value(parse_state, function_name_token);
// advance_to_sync_point(parse_state);
// return error_node(parse_state, "");
// }
} }
case .Pixel; { case .Pixel; {
node.pixel_entry_point = true; node.pixel_entry_point = true;
name = sprint("ps_%", function_name_token.ident_value); name = sprint("ps_%", function_name_token.ident_value);
// if return_type_token.kind == .TOKEN_INVALID {
// entry_point_requires_return_value(parse_state, function_name_token);
// advance_to_sync_point(parse_state);
// return error_node(parse_state, "");
// }
} }
} }
@@ -1045,73 +1302,57 @@ function_declaration :: (parse_state : *Parse_State, identifier_token : *Token,
return node; return node;
} }
instance_block :: (parse_state : *Parse_State) -> *AST_Node { buffer :: (state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
node : *AST_Node; node : *AST_Node = make_node(state, .Buffer);
source_location : Source_Range; source_location : Source_Range;
source_location.begin = parse_state.current; source_location.begin = state.current;
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'instance' keyword"); if check(state, .TOKEN_AT) {
properties := field_list(parse_state, .Semicolon); while check(state, .TOKEN_AT) {
advance(state);
node = make_node(parse_state, .Instance); // @Incomplete(niels): this is a mapping
add_child(node, properties); if check(state, .TOKEN_IDENTIFIER) {
array_add(*node.hint_tokens, state.current);
consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after instance block"); advance(state);
source_location.end = parse_state.previous; }
node.source_location = source_location; }
return node;
} }
meta_block :: (parse_state : *Parse_State) -> *AST_Node { consume(state, .TOKEN_LEFTBRACE, "Expect '{' after 'buffer' keyword");
node : *AST_Node; buffer := field_list(state, .Semicolon);
node.array_field = true;
source_location : Source_Range;
source_location.begin = parse_state.current;
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'meta' keyword");
properties := field_list(parse_state, .Semicolon);
node = make_node(parse_state, .Meta);
add_child(node, properties);
consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after meta block");
source_location.end = parse_state.previous;
node.source_location = source_location;
return node;
}
property_block :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
node : *AST_Node;
source_location : Source_Range;
source_location.begin = parse_state.current;
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'property' keyword");
properties := field_list(parse_state, .Semicolon);
node = make_node(parse_state, .Properties);
if identifier_token { if identifier_token {
node.name = identifier_token.ident_value; node.name = identifier_token.ident_value;
} }
add_child(node, properties); add_child(node, buffer);
consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after 'property' keyword"); consume(state, .TOKEN_RIGHTBRACE, "Expect '}' after 'buffer' block");
source_location.end = parse_state.previous; source_location.end = state.previous;
node.source_location = source_location; node.source_location = source_location;
return node; return node;
} }
constant_buffer :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node { constant_buffer :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
node : *AST_Node; node : *AST_Node = make_node(parse_state, .CBuffer);
source_location : Source_Range; source_location : Source_Range;
source_location.begin = parse_state.current; source_location.begin = parse_state.current;
if check(parse_state, .TOKEN_AT) {
while check(parse_state, .TOKEN_AT) {
advance(parse_state);
// @Incomplete(niels): this is a mapping
if check(parse_state, .TOKEN_IDENTIFIER) {
array_add(*node.hint_tokens, parse_state.current);
advance(parse_state);
}
}
}
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'constant_buffer' keyword"); consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'constant_buffer' keyword");
buffer := field_list(parse_state, .Semicolon); buffer := field_list(parse_state, .Semicolon);
node = make_node(parse_state, .CBuffer);
if identifier_token { if identifier_token {
node.name = identifier_token.ident_value; node.name = identifier_token.ident_value;
} }
@@ -1153,23 +1394,18 @@ const_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
return struct_declaration(parse_state, identifier_token); return struct_declaration(parse_state, identifier_token);
} else if check(parse_state, .TOKEN_LEFTPAREN) { } else if check(parse_state, .TOKEN_LEFTPAREN) {
return function_declaration(parse_state, identifier_token, .None); return function_declaration(parse_state, identifier_token, .None);
} else if match(parse_state, .TOKEN_PROPERTIES) {
return property_block(parse_state, identifier_token);
} else if match(parse_state, .TOKEN_CONSTANT_BUFFER) { } else if match(parse_state, .TOKEN_CONSTANT_BUFFER) {
return constant_buffer(parse_state, identifier_token); return constant_buffer(parse_state, identifier_token);
} else if match(parse_state, .TOKEN_BUFFER) {
return buffer(parse_state, identifier_token);
} }
return error_node(parse_state, tprint("Couldn't parse constant declaration at token %\n", parse_state.current.*)); return error_node(parse_state, tprint("Couldn't parse constant declaration at token %\n", parse_state.current.*));
} }
declaration :: (parse_state : *Parse_State) -> *AST_Node { declaration :: (parse_state : *Parse_State) -> *AST_Node {
skip_statement := false;
decl_node : *AST_Node; decl_node : *AST_Node;
if match(parse_state, .TOKEN_PROPERTIES) { if match(parse_state, .TOKEN_VERTEX) {
decl_node = property_block(parse_state);
} else if match(parse_state, .TOKEN_INSTANCE) {
decl_node = instance_block(parse_state);
} else if match(parse_state, .TOKEN_META) {
decl_node = meta_block(parse_state);
} else if match(parse_state, .TOKEN_VERTEX) {
vertex_token := parse_state.previous; vertex_token := parse_state.previous;
identifier := parse_state.current; identifier := parse_state.current;
@@ -1185,10 +1421,9 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
consume(parse_state, .TOKEN_DOUBLECOLON, "Expect '::' after pixel entry point declaration."); consume(parse_state, .TOKEN_DOUBLECOLON, "Expect '::' after pixel entry point declaration.");
decl_node = function_declaration(parse_state, identifier, .Pixel); decl_node = function_declaration(parse_state, identifier, .Pixel);
} else if check(parse_state, .TOKEN_LEFTPAREN) {
decl_node = call(parse_state, null);
} else if check(parse_state, .TOKEN_DIRECTIVE) { } else if check(parse_state, .TOKEN_DIRECTIVE) {
decl_node = directive(parse_state); decl_node = directive(parse_state);
skip_statement = true;
} else if check(parse_state, .TOKEN_IDENTIFIER) { } else if check(parse_state, .TOKEN_IDENTIFIER) {
identifier := parse_state.current; identifier := parse_state.current;
@@ -1211,7 +1446,7 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
decl_node = error; decl_node = error;
} }
if !decl_node { if !decl_node && !skip_statement {
decl_node = statement(parse_state); decl_node = statement(parse_state);
} }
@@ -1222,20 +1457,28 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
return decl_node; return decl_node;
} }
parse :: (result : *Compile_Result) { parse :: (ctx : *Compiler_Context, allocator := temp) {
if result.had_error { if ctx.had_error {
return; return;
} }
for *file : result.files { new_context := context;
new_context.allocator = allocator;
push_context new_context {
init_context_allocators();
defer clear_context_allocators();
parse_state : Parse_State; parse_state : Parse_State;
init_parse_state(*parse_state, file.tokens.tokens, file.file.path); array_reserve(*ctx.nodes, 4096);
parse_state.current_token_index = 0;
parse_state.ctx = ctx;
advance(*parse_state); advance(*parse_state);
if !match(*parse_state, .TOKEN_EOF) { if !match(*parse_state, .TOKEN_EOF) {
parse_state.result.root = make_node(*parse_state, .Program); parse_state.ctx.root = make_node(*parse_state, .Program);
array_reserve(*parse_state.result.root.children, 1024); array_reserve(*parse_state.ctx.root.children, 1024);
program := parse_state.result.root; program := parse_state.ctx.root;
while !check(*parse_state, .TOKEN_EOF) { while !check(*parse_state, .TOKEN_EOF) {
decl := declaration(*parse_state); decl := declaration(*parse_state);
@@ -1244,34 +1487,7 @@ parse :: (result : *Compile_Result) {
} }
} }
} }
//@Incomplete(nb): will this straight copy just work?
// Might need to rething how we do this.
file.ast_root = parse_state.result.root;
file.ast_nodes = parse_state.result.nodes;
copy_messages(parse_state.result.messages, *result.messages);
result.had_error |= parse_state.result.had_error;
} }
} }
parse :: (parse_state : *Parse_State) -> Parse_Result { #load "ast.jai";
advance(parse_state);
if !match(parse_state, .TOKEN_EOF) {
parse_state.result.root = make_node(parse_state, .Program);
array_reserve(*parse_state.result.root.children, 1024);
program := parse_state.result.root;
while !check(parse_state, .TOKEN_EOF) {
decl := declaration(parse_state);
if decl {
add_child(program, decl);
}
}
}
return parse_state.result;
}
#load "AST.jai";

View File

@@ -20,15 +20,15 @@ There is basic support for most HLSL built-in math functions for the following t
- Vector types: float2, float3, float4, int2, int3, int4 - Vector types: float2, float3, float4, int2, int3, int4
- Matrices: float4x4 - Matrices: float4x4
All of the above can be constructed with their namesake constructors i.e. `float4(x, y, z, w);`. All of the above can be constructed with their namesake constructors i.e. `float4(x, y, z, w);`.
We don't yet support textures and samplers. We also support Samplers and Texture2D
If you want to declare and use variables you can do it as follows If you want to declare and use variables you can do it as follows
```hlsl ```hlsl
x : float = 2.0; // no 'f' suffix required or even supported (it gives an error) x : float = 2.0; // no 'f' suffix required or even supported (it gives an error)
y : float = 4.0; y : float = 4.0;
v : float2 = float2(x, y); v : float2 = float2(x, y);
v2 := float2(x, y);
``` ```
For now it is required to specify the type of the variable (no type inference).
You can also do arithmetic as you would expect You can also do arithmetic as you would expect
``` ```
@@ -43,6 +43,7 @@ Camera_Data :: struct {
} }
``` ```
And there is a special struct called `properties`, which is used for custom data you want to pass in. And there is a special struct called `properties`, which is used for custom data you want to pass in.
#### ** Note: Properties will likely be deprecated, since the language now supports `@` hints to easily mark buffers and values with metadata.**
```hlsl ```hlsl
properties { properties {
projection : float4x4; projection : float4x4;
@@ -53,13 +54,14 @@ which will be exposed in the compiled result. `properties` can be renamed to a c
``` ```
p :: properties { p :: properties {
... ...
}
``` ```
You can also define constant buffers You can also define constant buffers
``` ```
camera :: Constant_Buffer { camera :: constant_buffer {
projection : float4x4; projection : float4x4;
view : float4x4; view : float4x4;
} }
@@ -70,69 +72,68 @@ camera :: Constant_Buffer {
To compile a shader and use the result, you can do the following in jai To compile a shader and use the result, you can do the following in jai
```jai ```jai
parse_shader :: (path : string, allocator : Allocator) -> Compilation_Result {
// In the future, you can pass environment defines to the compiler. // In the future, you can pass environment defines to the compiler.
compiler : Shader_Compiler; ctx : Compiler_Context;
compile_file(*compiler, "shader.shd", allocator);
return compile_file(*compiler, path,, allocator); if ctx.had_error {
} log_error("%\n", report_messages(ctx.messages),, temp);
result := parse_shader("shader.shd", allocator);
if result.had_error {
log_error("%\n", report_messages(result.messages),, temp);
return; return;
} }
collection := result.collection; // The ctx now contains all the needed information like the source text, entry points, constant buffers etc.
variant := collection.variants[0];
}
``` ```
When parsing a shader you get the following struct as a result When parsing a shader you get the following struct as a result
``` ```
Compilation_Result :: struct { Compiler_Context :: struct {
messages : [..]Compiler_Message; file : Input_File;
had_error : bool; environment : Environment;
collection : Shader_Variant_Collection; tokens : [..]Token;;
} root : *AST_Node;
``` nodes : [..]AST_Node;
A `Shader_Variant_Collection` looks as follows codegen_result_text : string;
```
Shader_Variant_Collection :: struct {
properties : Properties;
max_constant_buffers :: 16; constant_buffers : Static_Array(Type_Variable_Handle, 16);
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
variants : [..]Shader_Variant; scope_stack : Scope_Stack;
} type_variables : [..]Type_Variable;
Shader_Variant :: struct { property_name : string;
text : string;
vertex_entry_point : struct { vertex_entry_point : struct {
node : *AST_Node;
name : string; name : string;
input : [..]Field; input : [..]Field;
} }
pixel_entry_point : struct { pixel_entry_point : struct {
node : *AST_Node;
name : string; name : string;
return_value : Field; return_value : Field;
} }
properties : Properties;
max_constant_buffers :: 16;
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
had_error : bool;
messages : [..]Compiler_Message;
} }
Constant_Buffer :: struct { Constant_Buffer :: struct {
register : int;
name : string; name : string;
fields : Static_Array(Property_Field, 16); fields : Static_Array(Property_Field, 16);
// hints : Field_Hint; // optional hint...
hints : [..]Field_Hint;
buffer_index : u32; buffer_index : u32;
} }
@@ -192,11 +193,10 @@ Hint_Kind :: enum {
## Notable missing features ## Notable missing features
- Control flow: if/else, for, while, switch etc. - While
- Arrays - Arrays
- Textures and samplers
- Multiple render targets - Multiple render targets
- Custom buffers/structured buffers - Custom buffers/structured buffers
- Interpolation specifiers - Interpolation specifiers
- Proper variant handling with environment defines - Proper variant handling with environment defines
- Include/importing files such as shared utils etc. - Importing files such as shared utils etc. with something other than textual `#load`

File diff suppressed because it is too large Load Diff

681
Test.jai
View File

@@ -1,681 +0,0 @@
/////////////////////////////////////
//~ nbr: General improvements
//
// [x] Print out all failed tests in a list at the end
// [ ] Use new compiler API with Compile_Result and Compiled_File instead
// [ ] Use unix (posix? bash? ascii?) color codes for errors
// [ ] Print golden file as green and new output as red
#import "Basic";
#import "File";
#import "String";
#import "File_Utilities";
#import "Print_Color";
#load "module.jai";
// File-name conventions for the golden-file test harness.
GOLDEN_EXTENSION :: "golden";
// Per-stage output subfolders under TESTS_FOLDER.
LEXER_FOLDER :: "lex";
PARSER_FOLDER :: "parse";
CODEGEN_FOLDER :: "codegen";
COMPILED_FOLDER :: "compiled";
SEMANTIC_ANALYSIS_FOLDER :: "semant";
TESTS_FOLDER :: "test";
// Input extensions: .ink shader sources and .suite test-list files.
SHADER_EXTENSION :: "ink";
SUITE_EXTENSION :: "suite";
// Which compiler stages a test case exercises. Flags combine, so one test
// case can run several stages in sequence (see run_test_new).
Stage_Flags :: enum_flags u16 {
Lexer :: 0x1;
Parser :: 0x2;
Semantic_Analysis :: 0x4;
Codegen :: 0x8;
Compile :: 0x10;
}
// How test output is emitted: regenerate golden files, dump to stdout, or
// (when zero) compare against existing golden files.
Output_Type :: enum_flags u16 {
Golden :: 0x1;
StdOut :: 0x2;
}
// Outcome of one test stage; evaluate_result renders each variant differently.
Result_Type :: enum {
File_Read_Failed;
Golden_File_Not_Found;
StdOut;
Golden_Output;
Passed;
Failed;
}
// One recorded stage outcome for a test file.
Result :: struct {
type : Result_Type;
path : string;              // path of the tested shader file
stage : Stage_Flags;        // stage this result belongs to
golden_path : string;       // set when a new golden file was written
info_text : string;         // diagnostics / diff text shown to the user
}
// A single test input plus the set of stages to run on it.
Test_Case :: struct {
path : string;
stage_flags : Stage_Flags;
}
// A named collection of test cases and the results they produced.
Test_Suite :: struct {
name : string;
test_cases : [..]Test_Case;
results : [..]Result;
}
// Build the golden-file path for a test input at the given stage, e.g.
// "test/codegen/foo.golden" for stage .Codegen. Also creates the stage
// subdirectory under TESTS_FOLDER if it does not exist yet.
// NOTE(review): `allocator` is currently unused — the result comes from
// builder_to_string; confirm whether callers rely on a specific allocator.
get_golden_path :: (file_path : string, stage : Stage_Flags, allocator := context.allocator) -> string {
    path := parse_path(file_path);
    file_without_extension := split(path.words[path.words.count - 1], ".");

    // Drop the original file name; the stage folder and golden name are
    // appended below instead.
    path.words.count -= 1;

    if stage == {
        case .Lexer; {
            dir := tprint("%/%", TESTS_FOLDER, LEXER_FOLDER);
            make_directory_if_it_does_not_exist(dir);
            array_add(*path.words, LEXER_FOLDER);
        }
        case .Parser; {
            dir := tprint("%/%", TESTS_FOLDER, PARSER_FOLDER);
            make_directory_if_it_does_not_exist(dir);
            array_add(*path.words, PARSER_FOLDER);
        }
        case .Semantic_Analysis; {
            dir := tprint("%/%", TESTS_FOLDER, SEMANTIC_ANALYSIS_FOLDER);
            make_directory_if_it_does_not_exist(dir);
            array_add(*path.words, SEMANTIC_ANALYSIS_FOLDER);
        }
        case .Codegen; {
            dir := tprint("%/%", TESTS_FOLDER, CODEGEN_FOLDER);
            make_directory_if_it_does_not_exist(dir);
            array_add(*path.words, CODEGEN_FOLDER);
        }
        case .Compile; {
            dir := tprint("%/%", TESTS_FOLDER, COMPILED_FOLDER);
            make_directory_if_it_does_not_exist(dir);
            array_add(*path.words, COMPILED_FOLDER);
        }
    }

    builder : String_Builder;
    builder.allocator = temp;
    // Size hint is name + "." + extension. (Previously this passed
    // file_without_extension.count — the number of split pieces — which
    // under-reserved the buffer. Also removed the unused final_path_length.)
    init_string_builder(*builder, file_without_extension[0].count + GOLDEN_EXTENSION.count + 1);
    append(*builder, file_without_extension[0]);
    append(*builder, ".");
    append(*builder, GOLDEN_EXTENSION);
    golden_path := builder_to_string(*builder);

    array_add(*path.words, golden_path);
    return path_to_string(path);
}
// Compare `comparison_text` against the golden file at `golden_path`, or —
// when .Golden output is requested — (re)write the golden file instead.
// The verdict and any diagnostic text are stored into `result_data`.
do_golden_comparison :: (golden_path : string, comparison_text : string, result_data : *Result, output_type : Output_Type) {
    if output_type & .Golden {
        // Regenerate the golden file rather than comparing against it.
        write_entire_file(golden_path, comparison_text);
        result_data.golden_path = copy_string(golden_path);
        result_data.type = .Golden_Output;
        return;
    }

    if !file_exists(golden_path) {
        result_data.info_text = tprint("Golden file % does not exist. Please run with -output-as-golden at least once.\n", golden_path);
        result_data.type = .Golden_File_Not_Found;
        return;
    }

    golden_text, ok := read_entire_file(golden_path);
    if !ok {
        result_data.info_text = tprint("Unable to open golden file %\n", golden_path);
        result_data.type = .Golden_File_Not_Found;
        return;
    }

    // Normalize line endings on both sides so CRLF checkouts still match.
    comp := replace(comparison_text, "\r\n", "\n");
    gold := replace(golden_text, "\r\n", "\n");

    if compare(comp, gold) == 0 {
        result_data.type = .Passed;
    } else {
        result_data.type = .Failed;
        result_data.info_text = tprint("Golden file:\n%\n===============\n%", gold, comp);
    }
}
// Run the front end (lex, parse, check) on `file_path`, then the codegen
// stage as a golden test. Front-end failures short-circuit as .Failed.
run_codegen_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    add_file(result, file_path);
    result_data : Result;
    result_data.path = file_path;
    result_data.stage = .Codegen; // Was unset; evaluate_result prints the stage name.
    lex(result);
    parse(result);
    check(result);
    if result.had_error {
        result_data.type = .Failed;
        return result_data;
    }
    result_data = run_codegen_test(result, output_type);
    return result_data;
}
// Run codegen on an already-checked compile result and golden-compare the
// generated text (or dump it to stdout when .StdOut is requested).
run_codegen_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    result_data : Result;
    result_data.path = result.files[0].file.path;
    result_data.stage = .Codegen; // Was unset; evaluate_result prints the stage name.
    result_text : string;
    codegen(result);
    if result.had_error {
        result_data.type = .Failed;
        // Bug fix: the rendered messages were assigned to result_text and then
        // discarded on this early return, so codegen failures showed no
        // diagnostics. Attach them to the result instead.
        result_data.info_text = report_messages(result.messages);
        return result_data;
    }
    result_text = result.files[0].codegen_result_text;
    if output_type & .StdOut {
        result_data.info_text = result_text;
        result_data.type = .StdOut;
        return result_data;
    }
    golden_path := get_golden_path(result.files[0].file.path, .Codegen);
    do_golden_comparison(golden_path, result_text, *result_data, output_type);
    return result_data;
}
// Compile `path` end-to-end with the shader compiler; records pass/fail only.
// NOTE(review): `output_type` is accepted for signature symmetry with the
// other run_*_test procedures but currently has no effect here.
run_compile_test :: (path : string, output_type : Output_Type = 0) -> Result, Compile_Result {
    compiler : Shader_Compiler;
    result : Result;
    result.path = path;      // Was unset; siblings record the tested path.
    result.stage = .Compile; // Was unset; evaluate_result prints the stage name.
    compilation_result := compile_file(*compiler, path);
    print("\n");
    if compilation_result.had_error {
        result.type = .Failed;
        result.info_text = tprint("Failed compiling: %\n", path);
    }
    return result, compilation_result;
}
// Lex `file_path` and golden-compare (or print) the pretty-printed token
// stream. On lexer errors the rendered messages are compared instead, so
// error output can itself be a golden file.
run_lexer_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    result_data := Result.{path = file_path, stage = .Lexer};

    add_file(result, file_path);
    lex(result);

    result_text : string;
    if result.had_error {
        result_data.type = .Failed;
        result_text = report_messages(result.messages);
    } else {
        result_text = pretty_print_tokens(result.files[0].tokens.tokens, *temp);
    }

    // -output: dump to stdout instead of comparing against the golden file.
    if output_type & .StdOut {
        result_data.type = .StdOut;
        result_data.info_text = result_text;
        return result_data;
    }

    do_golden_comparison(get_golden_path(file_path, .Lexer), result_text, *result_data, output_type);
    return result_data;
}
// Lex then parse `file_path` as a golden test.
run_parser_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    add_file(result, file_path);
    result_data : Result;
    result_data.path = file_path;
    result_data.stage = .Parser; // Was unset; evaluate_result prints the stage name.
    lex(result);
    if result.had_error {
        // Bug fix: a lexing failure was recorded as .Passed here, silently
        // hiding broken inputs. The codegen and semantic-analysis file
        // overloads both record .Failed in the same situation.
        result_data.type = .Failed;
        return result_data;
    }
    result_data = run_parser_test(result, output_type);
    return result_data;
}
// Parse an already-lexed compile result and golden-compare the
// pretty-printed AST (or the rendered error messages, if parsing failed —
// note the golden comparison still runs and may turn .Failed into .Passed
// when the error output matches the golden file).
run_parser_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    parse(result);
    result_data : Result;
    result_data.path = result.files[0].file.path;
    result_data.stage = .Parser; // Was unset; evaluate_result prints the stage name.
    result_text : string;
    if result.had_error {
        result_data.type = .Failed;
        result_text = report_messages(result.messages,, temp);
    } else {
        result_text = pretty_print_ast(result.files[0].ast_root, *temp);
    }
    if output_type & .StdOut {
        result_data.info_text = result_text;
        result_data.type = .StdOut;
        return result_data;
    }
    golden_path := get_golden_path(result.files[0].file.path, .Parser);
    do_golden_comparison(golden_path, result_text, *result_data, output_type);
    return result_data;
}
// Run semantic checking on an already-parsed compile result and
// golden-compare the pretty-printed symbol table (or the rendered error
// messages, if checking failed).
run_semantic_analysis_test :: (result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    result_data : Result;
    result_data.path = result.files[0].file.path;
    result_data.stage = .Semantic_Analysis; // Was unset; evaluate_result prints the stage name.
    result_text : string;
    check(result);
    if result.had_error {
        result_data.type = .Failed;
        result_text = report_messages(result.messages);
    } else {
        result_text = pretty_print_symbol_table(result, temp);
    }
    if output_type & .StdOut {
        result_data.info_text = result_text;
        result_data.type = .StdOut;
        return result_data;
    }
    golden_path := get_golden_path(result.files[0].file.path, .Semantic_Analysis);
    do_golden_comparison(golden_path, result_text, *result_data, output_type);
    return result_data;
}
// Lex and parse `file_path`, then run the semantic-analysis golden test.
// Front-end failures short-circuit as .Failed.
run_semantic_analysis_test :: (file_path : string, result : *Compile_Result, output_type : Output_Type = 0) -> Result {
    add_file(result, file_path);
    result_data : Result;
    result_data.path = file_path;
    result_data.stage = .Semantic_Analysis; // Was unset; evaluate_result prints the stage name.
    lex(result);
    parse(result);
    if result.had_error {
        result_data.type = .Failed;
        return result_data;
    }
    // (Removed a stray ';;' empty statement on the line below.)
    result_data = run_semantic_analysis_test(result, output_type);
    return result_data;
}
// Build a Test_Case for `path` with the given stage flags. The path is
// copied with `allocator`, and backslashes are normalized to forward
// slashes in place on the copy.
make_test_case :: (path : string, stage_flags : Stage_Flags, allocator := context.allocator) -> Test_Case {
    normalized := copy_string(path,, allocator);
    replace_chars(normalized, "\\", #char "/");

    result : Test_Case;
    result.path = normalized;
    result.stage_flags = stage_flags;
    return result;
}
// Run the requested pipeline stages for one file. When the previous stage
// succeeded (or emitted a golden file), later stages reuse the shared
// Compile_Result; otherwise they restart from the file path.
run_test_new :: (file_path : string, stage_flags : Stage_Flags, results : *[..]Result, output_type : Output_Type = 0) {
    compile_result : Compile_Result;
    result : Result;
    if stage_flags & .Lexer {
        result = run_lexer_test(file_path, *compile_result, output_type);
        record_result(results, result);
    }
    if stage_flags & .Parser {
        // Bug fix: parenthesized the '||'. '&&' binds tighter, so the old
        // condition chained whenever the previous result was .Golden_Output
        // even if the lexer stage never ran. Now matches the branches below.
        if stage_flags & .Lexer && (result.type == .Passed || result.type == .Golden_Output) {
            result = run_parser_test(*compile_result, output_type);
        } else {
            result = run_parser_test(file_path, *compile_result, output_type);
        }
        record_result(results, result);
    }
    if stage_flags & .Semantic_Analysis {
        if stage_flags & .Parser && (result.type == .Passed || result.type == .Golden_Output) {
            result = run_semantic_analysis_test(*compile_result, output_type);
        } else {
            result = run_semantic_analysis_test(file_path, *compile_result, output_type);
        }
        record_result(results, result);
    }
    if stage_flags & .Codegen {
        if stage_flags & .Semantic_Analysis && (result.type == .Passed || result.type == .Golden_Output) {
            result = run_codegen_test(*compile_result, output_type);
        } else {
            result = run_codegen_test(file_path, *compile_result, output_type);
        }
        record_result(results, result);
    }
    if stage_flags & .Compile {
        // NOTE(review): the compile-stage result is never recorded into
        // `results` — confirm whether that is intended.
        result = run_compile_test(file_path, output_type);
    }
}
// Print a padded "Running test: <path>" header into `builder`, then execute
// the test case's stages via run_test_new.
run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_Type = 0, builder : *String_Builder) {
    print_to_builder(builder, "%Running test: %......", cyan(), test_case.path);

    // Pad with spaces so the per-stage verdicts line up in a column.
    // (Paths longer than the column width simply get no padding.)
    COLUMN_WIDTH :: 50;
    padding := COLUMN_WIDTH - test_case.path.count;
    for 0..padding append(builder, " ");

    run_test_new(test_case.path, test_case.stage_flags, results, output_type);
}
// Append one stage result to the suite's result list.
record_result :: (results : *[..]Result, result : Result) {
array_add(results, result);
}
// Run every test case in the suite, collecting per-stage results, then print
// a colored summary. Failures are gathered into a list that is echoed at the
// end so they are easy to spot in long runs.
run_test_suite :: (using suite : *Test_Suite, output_type : Output_Type = 0) {
if suite.name.count > 0 {
print("%Running suite: %\n", green(), suite.name);
print("%", reset_color());
}
// One entry per failing test: its path plus a short description of the
// stage (or file problem) that failed.
Fail_Data :: struct {
path : string;
stage : string;
}
failed_test_paths : [..]Fail_Data;
failed_test_paths.allocator = temp;
builder : String_Builder;
init_string_builder(*builder,, temp);
for test_case : test_cases {
run_test(test_case, *suite.results, output_type, *builder);
// Walk the result list backwards: the most recent entries belong to the
// test case just run, and we stop at the first entry for a different path.
for < suite.results {
result := suite.results[it_index];
if compare(result.path, test_case.path) == 0 {
if result.type == {
case .Failed; {
array_add(*failed_test_paths, .{ result.path, stage_to_string(result.stage) });
}
case .File_Read_Failed; {
array_add(*failed_test_paths, .{ result.path, "file not found" });
}
case .Golden_File_Not_Found; {
array_add(*failed_test_paths, .{ result.path, tprint("golden file not found for %", stage_to_string(result.stage)) });
}
}
evaluate_result(result, *builder);
} else {
// Reached results from an earlier test case; done with this one.
break;
}
}
// print("\n");
}
append(*builder, "\n");
// Summary is only printed in plain comparison mode (no -output flags).
if output_type == 0 {
if failed_test_paths.count == 0 {
green(*builder);
print_to_builder(*builder, "All % tests passed!\n", test_cases.count);
reset_color(*builder);
} else {
print_to_builder(*builder, "%/% tests passed\n", test_cases.count - failed_test_paths.count, test_cases.count);
red(*builder);
print_to_builder(*builder, "% failed\n", failed_test_paths.count);
for failed_test : failed_test_paths {
print_to_builder(*builder, "% failed with error: %\n", failed_test.path, failed_test.stage);
}
reset_color(*builder);
}
}
print("%\n", builder_to_string(*builder));
}
// Parse a .suite file into `suite`. Each non-empty, non-'#' line is
// "<path> <stage> <stage> ..." separated by spaces (or tabs as a fallback).
// The suite's name is the file name without its extension.
// Returns false only if the file itself cannot be read.
read_suite :: (file_path : string, suite : *Test_Suite) -> bool {
bytes, ok := read_entire_file(file_path);
if !ok {
log_error("Unable to read suite file %\n", file_path);
return false;
}
path := parse_path(file_path);
file_without_extension := split(path.words[path.words.count - 1], ".");
suite.name = copy_string(file_without_extension[0]);
split_lines := split(bytes, "\n");
for split_line : split_lines {
line := split(split_line, " ");
// Skip blank lines.
if line[0].count == 0 {
continue;
}
// Skip comment lines starting with '#'.
if line[0].data[0] == #char "#" {
continue;
}
// No spaces found: retry with tab separators before giving up on the line.
if line.count == 1 {
line = split(split_line, "\t");
if line.count == 1 {
log_error("Invalid line - % - \n", it_index + 1);
continue;
}
}
test_case_path := line[0];
stage_flags : Stage_Flags;
// Collect stage keywords. Index 0 is the path; it never matches a
// keyword, so scanning from 0 is harmless.
for i: 0..line.count - 1 {
trimmed := trim(line[i]);
if equal(trimmed, "lex") {
stage_flags |= .Lexer;
} else if equal(trimmed, "parse") {
stage_flags |= .Parser;
} else if equal(trimmed, "semant") {
stage_flags |= .Semantic_Analysis;
} else if equal(trimmed, "codegen") {
stage_flags |= .Codegen;
} else if equal(trimmed, "compile") {
stage_flags |= .Compile;
}
}
test_case := make_test_case(test_case_path, stage_flags);
array_add(*suite.test_cases, test_case);
}
return true;
}
// Stub: reading a standalone test description file is not implemented yet.
read_test :: () {
}
// Human-readable name for a single pipeline stage. Combined or zero flag
// values fall through to the empty string.
stage_to_string :: (stage : Stage_Flags) -> string {
    if stage == .Lexer             return "lexing";
    if stage == .Parser            return "parsing";
    if stage == .Semantic_Analysis return "semantic checking";
    if stage == .Codegen           return "codegen";
    if stage == .Compile           return "compiled";
    return "";
}
// Render one stage result into `builder` with a color per outcome:
// red for failures, yellow for newly written golden files, green for passes.
// Any accumulated info text (diffs, diagnostics) is appended afterwards.
evaluate_result :: (result : Result, builder : *String_Builder) {
stage : string = stage_to_string(result.stage);
if #complete result.type == {
case .File_Read_Failed; {
print_to_builder(builder, " %", red());
print_to_builder(builder, "failed with File_Read_Failed\n");
}
case .Golden_File_Not_Found; {
print_to_builder(builder, " %", red());
print_to_builder(builder, "failed with Golden File Not Found for stage %\n", stage);
}
case .StdOut; {
// Nothing to report; the payload is printed via info_text below.
}
case .Golden_Output; {
print_to_builder(builder, " %", yellow());
print_to_builder(builder, "output new golden file at %\n", result.golden_path);
}
case .Passed; {
print_to_builder(builder, " %", green());
print_to_builder(builder, "passed %\n", stage);
}
case .Failed; {
print_to_builder(builder, " %", red());
print_to_builder(builder, "failed %\n", stage);
}
}
if result.info_text.count > 0 {
print_to_builder(builder, "%", cyan());
print_to_builder(builder, "--- Info text ---\n");
print_to_builder(builder, "%", yellow());
print_to_builder(builder, "%\n", result.info_text);
}
print_to_builder(builder, "%", reset_color());
}
// Entry point: parse command-line arguments into one or more test suites,
// then run them. Arguments are either flags (-output, -output-as-golden,
// per-test stage flags) or file paths (.ink test files / .suite files).
main :: () {
// NOTE(review): `lexer` appears unused in this procedure — confirm and remove.
lexer : Lexer;
args := get_command_line_arguments();
suites : [..]Test_Suite;
output_type : Output_Type = 0;
// Tracks what kind of run the positional arguments have committed us to:
// a whole suite file, or an ad-hoc list of individual tests.
Argument_Parse_State :: enum {
None;
Run_Suite;
Run_Test;
}
arg_parse_state : Argument_Parse_State;
current_suite : *Test_Suite;
path : string;
for i: 1..args.count - 1 {
arg := args[i];
// Output-mode flags are accepted anywhere and apply globally.
if arg == "-output-as-golden" {
output_type |= .Golden;
continue;
} else if arg == "-output" {
output_type |= .StdOut;
continue;
}
if arg_parse_state == {
case .Run_Suite; {
// These two branches are unreachable (handled by the 'continue's
// above) but kept for safety.
if arg == "-output-as-golden" {
output_type |= .Golden;
} else if arg == "-output" {
output_type |= .StdOut;
} else {
print("%Unknown argument % %\n", red(), arg, reset_color());
}
}
case .Run_Test; {
// Stage flags apply to the most recently added test case.
cases := current_suite.test_cases.count;
if arg == "-lex" {
current_suite.test_cases[cases - 1].stage_flags |= .Lexer;
} else if arg == "-parse" {
current_suite.test_cases[cases - 1].stage_flags |= .Parser;
} else if arg == "-semant" {
current_suite.test_cases[cases - 1].stage_flags |= .Semantic_Analysis;
} else if arg == "-codegen" {
current_suite.test_cases[cases - 1].stage_flags |= .Codegen;
} else if arg == "-compile" {
current_suite.test_cases[cases - 1].stage_flags |= .Compile;
} else if contains(arg, ".") {
path_split := split(arg, "\\");
split_path := split(path_split[path_split.count - 1], ".");
extension := split_path[1];
if extension == SHADER_EXTENSION {
path := copy_string(arg);
test_case := make_test_case(path, 0);
array_add(*current_suite.test_cases, test_case);
} else {
print("%Invalid file as argument % %\n", red(), arg, reset_color());
}
} else {
print("%Unknown argument % %\n", red(), arg, reset_color());
}
}
case .None; {
if contains(arg, ".") {
path_split := split(arg, "\\");
split_path := split(path_split[path_split.count - 1], ".");
extension := split_path[1];
if extension == SHADER_EXTENSION {
if arg_parse_state == .Run_Suite {
log_error("Unable to run a test while already running suite.");
continue;
}
if !current_suite {
suite : Test_Suite;
array_add(*suites, suite);
current_suite = *suites[0];
}
arg_parse_state = .Run_Test;
path := copy_string(arg);
test_case := make_test_case(path, 0);
array_add(*current_suite.test_cases, test_case);
} else if extension == SUITE_EXTENSION {
if arg_parse_state == .Run_Test {
log_error("Unable to run a suite while already running test.");
continue;
}
arg_parse_state = .Run_Suite;
path := copy_string(arg);
suite : Test_Suite;
read_suite(path, *suite);
array_add(*suites, suite);
// NOTE(review): this always points at suites[0] even when several
// suites have been added, and array_add may reallocate the array,
// invalidating an earlier current_suite pointer. Looks like it
// should be *suites[suites.count - 1] — confirm.
current_suite = *suites[0];
} else {
print("%Invalid file as argument % %\n", red(), arg, reset_color());
}
}
}
}
}
for suite : suites {
run_test_suite(*suite, output_type);
}
}

View File

@@ -1,52 +1,95 @@
#import "Basic"; #import "Basic";
#import "File"; #import "File";
#import "Compiler"; #import "Compiler";
#import "Metaprogram_Plugins";
plugins: [..] *Metaprogram_Plugin;
build :: () { build :: () {
w := compiler_create_workspace("Shader Compiler Test Build"); w := compiler_create_workspace("Ink Build");
if !w { if !w {
print("Workspace creation failed.\n"); print("Workspace creation failed.\n");
return; return;
} }
EXECUTABLE_NAME :: "test"; EXECUTABLE_NAME :: "ink";
MAIN_FILE :: "Test.jai"; MAIN_FILE :: "ink.jai";
options := get_build_options(w); options := get_build_options(w);
options.write_added_strings = true;
args := options.compile_time_command_line; args := options.compile_time_command_line;
profile : bool = false;
for arg : args { for arg : args {
if arg == { if arg == {
case "check"; { case "check"; {
options.output_type = .NO_OUTPUT; options.output_type = .NO_OUTPUT;
} }
case "profile"; {
} }
} }
}
intercept_flags: Intercept_Flags;
plugins_to_create: [..] Plugin_To_Create;
if profile {
tracy : Plugin_To_Create;
tracy.name = "tracy";
array_add(*plugins_to_create, tracy);
}
success := init_plugins(plugins_to_create, *plugins, w);
if !success {
log_error("A plugin init() failed. Exiting.\n");
exit(0);
}
new_path: [..] string; new_path: [..] string;
array_add(*new_path, ..options.import_path); array_add(*new_path, ..options.import_path);
array_add(*new_path, "modules"); array_add(*new_path, "modules");
// array_add(*new_path, "modules/shader_parsing");
options.import_path = new_path; options.import_path = new_path;
options.output_executable_name = EXECUTABLE_NAME; options.output_executable_name = EXECUTABLE_NAME;
wd := get_working_directory(); wd := get_working_directory();
set_build_options(options, w); set_build_options(options, w);
compiler_begin_intercept(w); for plugins {
if it.before_intercept it.before_intercept(it, *intercept_flags);
}
compiler_begin_intercept(w, intercept_flags);
for plugins if it.add_source it.add_source(it);
add_build_file(MAIN_FILE, w); add_build_file(MAIN_FILE, w);
// Call message_loop(), which is a routine of ours below that will receive the messages.
message_loop(w);
compiler_end_intercept(w); compiler_end_intercept(w);
for plugins if it.finish it.finish (it);
for plugins if it.shutdown it.shutdown(it);
print("\nDone!\n\n"); print("\nDone!\n\n");
set_build_options_dc(.{do_output=false}); set_build_options_dc(.{do_output=false, write_added_strings=false});
} }
#run build(); message_loop :: (w: Workspace) {
while true {
// We ask the compiler for the next message. If one is not available,
// we will wait until it becomes available.
message := compiler_wait_for_message();
// Pass the message to all plugins.
for plugins if it.message it.message(it, message);
if message.kind == .COMPLETE break;
}
}
#run, stallable build();

View File

@@ -1,8 +1,34 @@
#load "Lexing.jai"; #load "lexing.jai";
#load "Error.jai"; #load "error.jai";
#load "Parsing.jai"; #load "parsing.jai";
#load "Semantic_Analysis.jai"; #load "check.jai";
#load "Codegen.jai"; #load "codegen.jai";
#import "File_Utilities";
/* TODO
- [x] Remove builtin stringbuilding and replace it with ad-hoc string building when error reporting. In that case we are already building a string anyway, so we can just pass in the string builder
- [ ] Support structured buffers (ro, rw, w)
- [ ] Support mesh and amplification shaders
- [ ] Support compute shaders
- [x] Support #if at top level
- [x] Support #if at block level
- [x] Remove properties block and just use hinted constant buffers instead
```
props :: constant_buffer @properties {
[...]
}
```
- [ ] while loops
- [ ] for-each loops
- [ ] add parameters to hints (meta properties, resource binding indices if needed)
- [ ] consider @entry(stage) syntax instead of the forced keyword
- [ ] Add flags to compiler
- [ ] Generate output flag(s)
- [ ] Possibly final stage flag, so you can just call compile_file and it only does what you need.
- Probably this flag is about which stage you need as the _last_ and not which stages to do, as that doesn't make sense.
- [ ] Multiple output languages?
*/
add_define :: (env : *Environment, key : string) { add_define :: (env : *Environment, key : string) {
for define : env.defines { for define : env.defines {
@@ -26,10 +52,6 @@ Environment :: struct {
defines : [..]string; defines : [..]string;
} }
Shader_Compiler :: struct {
environment : Environment;
}
Field_Kind :: enum { Field_Kind :: enum {
Int :: 0; Int :: 0;
Half :: 1; Half :: 1;
@@ -57,10 +79,33 @@ Hint_Kind :: enum {
Position; Position;
UV; UV;
Target; Target;
Output_Position;
Custom; Custom;
} }
Hint_Names :: #run -> [(cast(int)Hint_Kind.Target) + 1]string {
names : [(cast(int)Hint_Kind.Target) + 1]string;
names[Hint_Kind.Position] = "position";
names[Hint_Kind.UV] = "uv";
names[Hint_Kind.Target] = "target";
return names;
}
lookup_hint :: (name : string) -> Hint_Kind {
if name == "position" {
return Hint_Kind.Position;
} else if name == "uv" {
return Hint_Kind.UV;
} else if starts_with(name, "target") {
return Hint_Kind.Target;
} else if name == "outposition" {
return Hint_Kind.Output_Position;
}
return .None;
}
Field_Hint :: struct { Field_Hint :: struct {
kind : Hint_Kind; kind : Hint_Kind;
@@ -83,78 +128,44 @@ Entry_Point :: struct {
return_value : Field; return_value : Field;
} }
Shader_Variant :: struct { Buffer_Kind :: enum {
text : string; Constant;
Structured;
}
vertex_entry_point : struct { Buffer :: struct {
kind : Buffer_Kind;
name : string; name : string;
input : [..]Field; fields : Static_Array(Field, 16);
}
pixel_entry_point : struct { hints : [..]Field_Hint;
name : string;
return_value : Field;
}
}
Property_Field :: struct {
base_field : Field;
// @Incomplete(nb): Editor information, min max, etc.
// This should also be compiled out for ship
}
Properties :: struct {
fields : [..]Property_Field;
buffer_index : u32; buffer_index : u32;
} }
Constant_Buffer :: struct {
register : int;
name : string;
fields : Static_Array(Property_Field, 16);
buffer_index : u32;
}
Shader_Variant_Collection :: struct {
properties : Properties;
max_constant_buffers :: 16;
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
variants : [..]Shader_Variant;
}
Input_File :: struct { Input_File :: struct {
source : string; source : string;
path : string; path : string;
} }
Token_Stream :: struct { Compiler_Context :: struct {
tokens : [..]Token;
}
Compiled_File :: struct {
file : Input_File; file : Input_File;
tokens : Token_Stream;
ast_root : *AST_Node; environment : Environment;
ast_nodes : [..]AST_Node;
tokens : [..]Token;;
root : *AST_Node;
nodes : [..]AST_Node;
codegen_result_text : string; codegen_result_text : string;
constant_buffers : Static_Array(Type_Variable_Handle, 16); typed_buffers : Static_Array(Type_Variable_Handle, 32);
// structured_buffers : Static_Array(Type_Variable_Handle, 16);
scope_stack : Scope_Stack; scope_stack : Scope_Stack;
type_variables : [..]Type_Variable; type_variables : [..]Type_Variable;
property_name : string;
vertex_entry_point : struct { vertex_entry_point : struct {
node : *AST_Node; node : *AST_Node;
name : string; name : string;
@@ -167,26 +178,42 @@ Compiled_File :: struct {
return_value : Field; return_value : Field;
} }
properties : Properties; max_buffers :: 32;
max_constant_buffers :: 16; buffers : Static_Array(Buffer, max_buffers);
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
allocator : Allocator;
arena : Arena;
}
Compile_Result :: struct {
files : [..]Compiled_File;
had_error : bool; had_error : bool;
messages : [..]Compiler_Message; messages : [..]Compiler_Message;
allocator : Allocator;
arena : Arena;
} }
record_error :: (result : *Compile_Result, format : string, args : .. Any) { #add_context scratch_allocators : [2]Allocator;
#add_context scratch_id : int = 0;
init_context_allocators :: () {
if get_arena(context.scratch_allocators[0]) == null {
context.scratch_allocators[0] = make_arena(Megabytes(128));
context.scratch_allocators[1] = make_arena(Megabytes(128));
}
}
clear_context_allocators :: () {
if get_arena(context.scratch_allocators[0]) != null {
clear(context.scratch_allocators[0]);
clear(context.scratch_allocators[1]);
}
}
get_scratch :: (conflict : Allocator = .{}) -> Scratch {
arena := cast(*Arena)conflict.data;
if arena == get_arena(context.scratch_allocators[0]) || context.scratch_id == 0 {
context.scratch_id = 1;
return scratch_begin(*context.scratch_allocators[1]);
}
context.scratch_id = 0;
return scratch_begin(*context.scratch_allocators[0]);
}
record_error :: (result : *Compiler_Context, format : string, args : .. Any) {
error : Compiler_Message; error : Compiler_Message;
error.message_kind = .Error; error.message_kind = .Error;
error.message = sprint(format, args); error.message = sprint(format, args);
@@ -194,52 +221,30 @@ record_error :: (result : *Compile_Result, format : string, args : .. Any) {
array_add(*result.messages, error); array_add(*result.messages, error);
} }
//@Incomplete(niels): need to consider allocation make_file :: (result : *Compiler_Context, path : string) -> Input_File {
add_file :: (result : *Compile_Result, path : string) { if !file_exists(path) {
record_error(result, "Unable to load file: %", path);
return .{};
}
file_string, ok := read_entire_file(path); file_string, ok := read_entire_file(path);
if !ok { if !ok {
record_error(result, "Unable to load file: %", path); record_error(result, "Unable to load file: %", path);
return; return .{};
} }
return make_file_from_string(file_string, path);
}
make_file_from_string :: (source : string, path : string = "") -> Input_File {
input_file : Input_File; input_file : Input_File;
input_file.source = file_string; input_file.source = source;
input_file.path = path; input_file.path = path;
compiled_file : Compiled_File; return input_file;
compiled_file.file = input_file;
compiled_file.allocator = make_arena(*compiled_file.arena);
array_add(*result.files, compiled_file);
} }
// @Incomplete(nb): Will we ever even use this?
from_file :: (path : string) -> Compile_Result {
arr : [1]string;
arr[0] = path;
return from_files(arr);
}
from_files :: (paths : []string) -> Compile_Result {
result : Compile_Result;
for path : paths {
add_file(*result, path);
}
return result;
}
// Compilation_Result :: struct {
// messages : [..]Compiler_Message;
// had_error : bool;
// collection : Shader_Variant_Collection;
// }
pretty_print_field :: (field : *Field) -> string { pretty_print_field :: (field : *Field) -> string {
builder : String_Builder; builder : String_Builder;
init_string_builder(*builder,, temp); init_string_builder(*builder,, temp);
@@ -254,7 +259,7 @@ Min_Field_Name :: 10;
pretty_print_field :: (builder : *String_Builder, field : *Field) { pretty_print_field :: (builder : *String_Builder, field : *Field) {
if field.name.count > 0 { if field.name.count > 0 {
print_to_builder(builder, "% ", field.name); print_to_builder(builder, "% ", field.name);
append(builder, "- "); append(builder, ": ");
} else { } else {
append(builder, "return - "); append(builder, "return - ");
} }
@@ -283,10 +288,17 @@ pretty_print_field :: (builder : *String_Builder, field : *Field) {
case .Struct; { case .Struct; {
print_to_builder(builder, "struct : % {", type.name); print_to_builder(builder, "struct : % {", type.name);
newline_after := type.children.count / 4;
for *child : type.children { for *child : type.children {
pretty_print_field(builder, child); pretty_print_field(builder, child);
if it_index < type.children.count - 1 { if it_index < type.children.count - 1 {
append(builder, " "); append(builder, ", ");
}
if it_index % newline_after == 0 {
append(builder, "\n");
indent(builder, 4);
} }
} }
@@ -316,11 +328,7 @@ pretty_print_field :: (builder : *String_Builder, field : *Field) {
} }
} }
type_variable_to_field :: (checker : *Semantic_Checker, variable : Type_Variable_Handle) -> Field { type_variable_to_field :: (ctx : *Compiler_Context, variable : *Type_Variable) -> Field {
return type_variable_to_field(checker, from_handle(checker, variable));
}
type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope_Stack, variable : *Type_Variable) -> Field {
field : Field; field : Field;
field.name = variable.name; field.name = variable.name;
@@ -353,14 +361,14 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
case .Struct; { case .Struct; {
type.kind = Field_Kind.Struct; type.kind = Field_Kind.Struct;
find_result := find_symbol(scope_stack, variable.typename, xx 1); find_result := find_symbol(ctx.scope_stack, variable.typename, xx 1);
assert(find_result != null, "Internal compiler error\n"); assert(find_result != null, "Internal compiler error\n");
type_var := from_handle(type_variables, find_result.type_variable); type_var := from_handle(ctx.type_variables, find_result.type_variable);
for i : 0..type_var.children.count - 1 { for i : 0..type_var.children.count - 1 {
child := type_var.children[i]; child := type_var.children[i];
child_field := type_variable_to_field(type_variables, scope_stack, child); child_field := type_variable_to_field(ctx, child);
array_add(*type.children, child_field); array_add(*type.children, child_field);
} }
@@ -371,13 +379,11 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
for hint : variable.source_node.hint_tokens { for hint : variable.source_node.hint_tokens {
field_hint : Field_Hint; field_hint : Field_Hint;
if hint.ident_value == "position" { if lookup_hint(hint.ident_value) == .Position {
// @Incomplete(nb): Should be a lookup table somewhere
field_hint.kind = .Position; field_hint.kind = .Position;
} else if hint.ident_value == "uv" { } else if lookup_hint(hint.ident_value) == .UV {
field_hint.kind = .UV; field_hint.kind = .UV;
} else if starts_with(hint.ident_value, "target") { } else if lookup_hint(hint.ident_value) == .Target {
// @Incomplete(nb): Should be a lookup table somewhere
index_str : string; index_str : string;
index_str.data = *hint.ident_value.data[7]; index_str.data = *hint.ident_value.data[7];
index_str.count = 1; index_str.count = 1;
@@ -388,7 +394,7 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
} }
field_hint.kind = .Target; field_hint.kind = .Target;
} else { } else {
field_hint.custom_hint_name = copy_string(hint.ident_value); field_hint.custom_hint_name = hint.ident_value;
field_hint.kind = .Custom; field_hint.kind = .Custom;
} }
array_add(*field.hints, field_hint); array_add(*field.hints, field_hint);
@@ -399,20 +405,50 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
return field; return field;
} }
type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope_Stack, variable : Type_Variable_Handle) -> Field { type_variable_to_field :: (ctx : *Compiler_Context, variable : Type_Variable_Handle) -> Field {
return type_variable_to_field(type_variables, scope_stack, from_handle(type_variables, variable)); return type_variable_to_field(ctx, from_handle(ctx.type_variables, variable));
} }
type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variable) -> Field { generate_buffer :: (ctx : *Compiler_Context, type_handle : Type_Variable_Handle, buffers : *Static_Array) {
return type_variable_to_field(checker.result_file.type_variables, checker.result_file.scope_stack, variable); variable := from_handle(ctx.type_variables, type_handle);
buffer := array_add(buffers);
if variable.type == {
case .CBuffer; {
buffer.kind = .Constant;
}
case .Buffer; {
buffer.kind = .Structured;
}
}
buffer.name = variable.name;
for i : 0..variable.children.count - 1 {
child := variable.children[i];
field : Field = type_variable_to_field(ctx, from_handle(ctx.type_variables, child));
array_add(*buffer.fields, field);
} }
generate_output_data :: (result : *Compile_Result) { buffer.buffer_index = variable.resource_index;
for *file : result.files {
if file.vertex_entry_point.node {
file.vertex_entry_point.name = file.vertex_entry_point.node.name;
type_variable := from_handle(file.type_variables, file.vertex_entry_point.node.type_variable); for hint : variable.source_node.hint_tokens {
field_hint : Field_Hint;
field_hint.custom_hint_name = hint.ident_value;
field_hint.kind = .Custom;
array_add(*buffer.hints, field_hint);
}
}
generate_output_data :: (ctx : *Compiler_Context) {
if ctx.had_error {
return;
}
if ctx.vertex_entry_point.node {
ctx.vertex_entry_point.name = ctx.vertex_entry_point.node.name;
type_variable := from_handle(ctx.type_variables, ctx.vertex_entry_point.node.type_variable);
assert(type_variable.type == .Function); assert(type_variable.type == .Function);
node := type_variable.source_node; node := type_variable.source_node;
@@ -420,58 +456,32 @@ generate_output_data :: (result : *Compile_Result) {
if node.children[0].kind == .FieldList { if node.children[0].kind == .FieldList {
field_list := node.children[0]; field_list := node.children[0];
for child : field_list.children { for child : field_list.children {
tv := from_handle(file.type_variables, child.type_variable); tv := from_handle(ctx.type_variables, child.type_variable);
field := type_variable_to_field(file.type_variables, file.scope_stack, tv); field := type_variable_to_field(ctx, tv);
array_add(*file.vertex_entry_point.input, field); array_add(*ctx.vertex_entry_point.input, field);
} }
} }
} }
} }
for buffer_variable : file.constant_buffers { for buffer_variable : ctx.typed_buffers {
variable := from_handle(file.type_variables, buffer_variable); generate_buffer(ctx, buffer_variable, *ctx.buffers);
cb := array_add(*file.cbuffers);
for i : 0..variable.children.count - 1 {
child := variable.children[i];
field : Property_Field;
field.base_field = type_variable_to_field(file.type_variables, file.scope_stack, from_handle(file.type_variables, child));
array_add(*cb.fields, field);
} }
cb.buffer_index = variable.resource_index; if ctx.pixel_entry_point.node {
} ctx.pixel_entry_point.name = ctx.pixel_entry_point.node.name;
find_result := find_symbol(*file.scope_stack, file.property_name, xx 1); type_variable := from_handle(ctx.type_variables, ctx.pixel_entry_point.node.type_variable);
if find_result {
property_variable := from_handle(file.type_variables, find_result.type_variable);
for i : 0..property_variable.children.count - 1 {
child := property_variable.children[i];
field := type_variable_to_field(file.type_variables, file.scope_stack, from_handle(file.type_variables, child));
prop_field : Property_Field;
prop_field.base_field = field;
array_add(*file.properties.fields, prop_field);
}
file.properties.buffer_index = property_variable.resource_index;
}
if file.pixel_entry_point.node {
file.pixel_entry_point.name = file.pixel_entry_point.node.name;
type_variable := from_handle(file.type_variables, file.pixel_entry_point.node.type_variable);
assert(type_variable.type == .Function); assert(type_variable.type == .Function);
field := type_variable_to_field(file.type_variables, file.scope_stack, type_variable.return_type_variable); if type_variable.return_type_variable > 0 {
field := type_variable_to_field(ctx, type_variable.return_type_variable);
for hint : type_variable.source_node.hint_tokens { for hint : type_variable.source_node.hint_tokens {
field_hint : Field_Hint; field_hint : Field_Hint;
if hint.ident_value == "position" { if lookup_hint(hint.ident_value) == .Position {
// @Incomplete(nb): Should be a lookup table somewhere
field_hint.kind = .Position; field_hint.kind = .Position;
} else if starts_with(hint.ident_value, "target") { } else if lookup_hint(hint.ident_value) == .Target {
// @Incomplete(nb): Should be a lookup table somewhere
index_str : string; index_str : string;
index_str.data = *hint.ident_value.data[7]; index_str.data = *hint.ident_value.data[7];
index_str.count = 1; index_str.count = 1;
@@ -486,26 +496,24 @@ generate_output_data :: (result : *Compile_Result) {
} }
array_add(*field.hints, field_hint); array_add(*field.hints, field_hint);
} }
ctx.pixel_entry_point.return_value = field;
file.pixel_entry_point.return_value = field;
} }
} }
} }
compile_file :: (compiler : *Shader_Compiler, paths : ..string) -> Compile_Result { compile_file :: (ctx : *Compiler_Context, path : string, allocator : Allocator = temp) {
result : Compile_Result; new_context := context;
new_context.allocator = allocator;
push_context new_context {
init_context_allocators();
defer clear_context_allocators();
for path : paths { ctx.file = make_file(ctx, path);
add_file(*result, path);
lex(ctx, allocator);
parse(ctx, allocator);
check(ctx, allocator);
codegen(ctx, allocator);
generate_output_data(ctx);
} }
lex(*result);
parse(*result);
check(*result);
codegen(*result);
generate_output_data(*result);
return result;
} }

1
modules/tracy Submodule

Submodule modules/tracy added at 9668d7b8ab

BIN
output.tracy Normal file

Binary file not shown.

View File

@@ -0,0 +1,4 @@
vertex main :: () {
v : float2;
v.x = (2.0 + ((4.0 - 2.0) * 1.5)) * 3.0;
}

View File

@@ -1,5 +1,6 @@
vertex main :: () -> float4 @position { vertex main :: () -> float4 @position {
arr : [16].float4; arr : [16].float4;
arr[0] = float4(1,1,1); arr[0] = float4(1, 1, 1, 1);
return arr[0]; pos := arr[1];
return pos;
} }

View File

@@ -0,0 +1,5 @@
vertex main :: () {
a : float2;
b : float2;
(a + b).x = 2.0;
}

View File

@@ -0,0 +1,10 @@
P :: struct {
v : float2;
}
vertex main :: () {
p : P;
p.v.x.y = 2.0;
// v : float2;
// v.x.y.z = 2.0;
}

View File

@@ -1,4 +1,4 @@
properties { properties :: Constant_Buffer @properties {
color : float4; color : float4;
} }

11
test/buffers.ink Normal file
View File

@@ -0,0 +1,11 @@
property_buffer :: Buffer {
color : float4;
}
const_buffer :: Constant_Buffer {
color : float4;
}
pixel main :: (index : int) {
return property_buffer[index].color;
}

34
test/builtin_types.ink Normal file
View File

@@ -0,0 +1,34 @@
vertex main :: () {
v2 : float2 = float2(2.0, 2.0);
v2 = float2(2.0);
v2 = float2(v2);
v3 : float3 = float3(2.0, 2.0, 2.0);
v3 = float3(v2, 1.0);
v3 = float3(1.0, v2);
v3 = float3(1.0);
v3 = float3(v3);
v4 : float4 = float4(2.0, 2.0, 2.0, 2.0);
v4 = float4(v4);
v4 = float4(v2, v2);
v4 = float4(v2, 1.0, 1.0);
v4 = float4(1.0, v2, 1.0);
v4 = float4(1.0, 1.0, v2);
v4 = float4(v3, 2.0);
v4 = float4(2.0, v3);
v4 = float4(2.0);
v4 = float4(1.0, 1.0, v2);
v4 = float4(2.0);
v2.x = 2.0;
v2.y = 2.0;
p := v2.x + v3.z;
q := v4.w + v2.x;
m : float4x4;
}

View File

@@ -0,0 +1,6 @@
scope (global) [
[vertex__vs_main] : ()
scope (vertex__vs_main) [
[v] : float2
]
]

7
test/check/arrays.golden Normal file
View File

@@ -0,0 +1,7 @@
scope (global) [
[vertex__vs_main] : () -> float4
scope (vertex__vs_main) [
[pos] : float4
[arr] : [16].float4
]
]

View File

@@ -0,0 +1,6 @@
test/bad_double_access.ink:7,4: error: Attempting to access a field on a primitive type 'float'.
p.v.x.
^
declaration:
x: float


View File

@@ -1,7 +1,7 @@
scope (global) [ scope (global) [
[properties] : {color : float4}
[pixel__ps_main] : () -> float4 [pixel__ps_main] : () -> float4
[vertex__vs_main] : (pos : float3) -> float3 [vertex__vs_main] : (pos : float3) -> float3
[properties] : {color : float4}
scope (properties) [ scope (properties) [
[color] : float4 [color] : float4
] ]

View File

@@ -0,0 +1,11 @@
scope (global) [
[vertex__vs_main] : ()
scope (vertex__vs_main) [
[v2] : float2
[v4] : float4
[v3] : float3
[p] : float
[m] : float4x4
[q] : float
]
]

View File

@@ -1,7 +1,7 @@
scope (global) [ scope (global) [
[camera] : {projection : float4x4, view : float4x4}
[pixel__ps_main] : () -> float4 [pixel__ps_main] : () -> float4
[vertex__vs_main] : (pos : float4) -> float4 [vertex__vs_main] : (pos : float4) -> float4
[camera] : {projection : float4x4, view : float4x4}
scope (camera) [ scope (camera) [
[projection] : float4x4 [projection] : float4x4
[view] : float4x4 [view] : float4x4

View File

@@ -0,0 +1,10 @@
scope (global) [
[vertex__vs_main] : ()
[p] : {v : float2}
scope (p) [
[v] : float2
]
scope (vertex__vs_main) [
[x] : float
]
]

View File

@@ -0,0 +1,10 @@
scope (global) [
[vertex__vs_main] : ()
scope (vertex__vs_main) [
[x] : int
scope (block) [
[i] : int
scope (block) []
]
]
]

View File

@@ -0,0 +1,4 @@
test/for_index_outside.ink:6,0: error: Use of undeclared symbol 'i'
 i += 1;
^


View File

@@ -0,0 +1,13 @@
scope (global) [
[vertex__vs_main] : (pos : float4) -> float4
[props] : {projection : float4x4, view : float4x4}
scope (props) [
[projection] : float4x4
[view] : float4x4
]
scope (vertex__vs_main) [
[pos] : float4
[mv] : float4
[mvp] : float4
]
]

View File

@@ -0,0 +1,8 @@
scope (global) [
[pixel__ps_main] : ()
scope (pixel__ps_main) [ scope (block) [
[alpha_color] : float4
[f] : float
]
]
]

View File

@@ -0,0 +1,4 @@
scope (global) [
[vertex__vs_console_main] : ()
scope (vertex__vs_console_main) []
]

8
test/check/ifdefs.golden Normal file
View File

@@ -0,0 +1,8 @@
scope (global) [
[vertex__vs_skinning_main] : ()
[pixel__ps_main] : ()
scope (vertex__vs_skinning_main) [
[x] : float
]
scope (pixel__ps_main) []
]

View File

@@ -2,5 +2,8 @@ scope (global) [
[vertex__vs_main] : (pos : float3) -> float4 [vertex__vs_main] : (pos : float3) -> float4
scope (vertex__vs_main) [ scope (vertex__vs_main) [
[pos] : float3 [pos] : float3
scope (block) [ scope (block) []
scope (block) []
]
] ]
] ]

View File

@@ -1,7 +1,7 @@
scope (global) [ scope (global) [
[props] : {color : float4}
[pixel__ps_main] : () -> float4 [pixel__ps_main] : () -> float4
[vertex__vs_main] : (pos : float4) -> float4 [vertex__vs_main] : (pos : float4) -> float4
[props] : {color : float4}
scope (props) [ scope (props) [
[color] : float4 [color] : float4
] ]

View File

@@ -0,0 +1,8 @@
scope (global) [
[vertex__vs_main] : ()
scope (vertex__vs_main) [
[b] : float2
[x] : float
[a] : float2
]
]

View File

@@ -2,5 +2,8 @@ scope (global) [
[vertex__vs_main] : (pos : float3) -> float4 [vertex__vs_main] : (pos : float3) -> float4
scope (vertex__vs_main) [ scope (vertex__vs_main) [
[pos] : float3 [pos] : float3
scope (block) []
scope (block) []
scope (block) []
] ]
] ]

View File

@@ -2,5 +2,6 @@ scope (global) [
[vertex__vs_main] : (pos : float3) -> float4 [vertex__vs_main] : (pos : float3) -> float4
scope (vertex__vs_main) [ scope (vertex__vs_main) [
[pos] : float3 [pos] : float3
scope (block) []
] ]
] ]

View File

@@ -2,5 +2,7 @@ scope (global) [
[vertex__vs_main] : (pos : float3) -> float4 [vertex__vs_main] : (pos : float3) -> float4
scope (vertex__vs_main) [ scope (vertex__vs_main) [
[pos] : float3 [pos] : float3
scope (block) []
scope (block) []
] ]
] ]

View File

@@ -0,0 +1,6 @@
test/temp_access.ink:5,10: error: Cannot assign to an lvalue.
 (a + b).x = 2.0;
^^^^^^^^^^^


View File

@@ -0,0 +1,30 @@
test/wrong_multiply.ink:4,18: error: Procedure call did not match any of the possible overloads for 'float4'
 found:
result : float4 = float4(1.0, foo * res, 0.0, 1.0);
^^^^^^
 While matching argument 2 in function call.
 result : float4 = float4(1.0, foo * res, 0.0, 1.0);
^
 Possible overloads:
 float4 :: (float, float, float, float)
 float4 :: (float2, float2)
 float4 :: (float2, float, float)
 float4 :: (float, float2, float)
 float4 :: (float, float, float2)
 float4 :: (float, float3)
 float4 :: (float3, float)
 float4 :: (float4)
 float4 :: (float)
test/wrong_multiply.ink:4,34: error: Type mismatch. Expected float got float2
 found:
result : float4 = float4(1.0, foo * res, 0.0, 1.0);
^
expected:
float
got:
result : float4 = float4(1.0, foo * res, 0.0, 1.0);


View File

@@ -0,0 +1,30 @@
test/wrong_type_for_function.ink:11,17: error: Procedure call did not match any of the possible overloads for 'float4'
 found:
color : float4 = float4(y, 1.0, 1.0, 1.0);
^^^^^^
 While matching argument 1 in function call.
 color : float4 = float4(y, 1.0, 1.0, 1.0);
^
 Possible overloads:
 float4 :: (float, float, float, float)
 float4 :: (float2, float2)
 float4 :: (float2, float, float)
 float4 :: (float, float2, float)
 float4 :: (float, float, float2)
 float4 :: (float, float3)
 float4 :: (float3, float)
 float4 :: (float4)
 float4 :: (float)
test/wrong_type_for_function.ink:11,24: error: Type mismatch. Expected float got float2
 found:
color : float4 = float4(y, 1.0, 1.0, 1.0);
^
expected:
float
got:
y : float2 = foo()


47
test/check_all.suite Normal file
View File

@@ -0,0 +1,47 @@
test/assign_arithmetic_expression.ink check
test/arithmetic_parens.ink check
test/basic_property_and_return_value.ink check
test/builtin_types.ink check
test/complicated_computation.ink check
test/constant_buffer.ink check
test/bad_double_access.ink check
test/double_access.ink check
test/empty_struct.ink check
test/empty_vertex_main.ink check
test/empty_vertex_main_with_position_parameter.ink check
test/field_assignment.ink check
test/for_i_loop.ink check
test/function_call.ink check
test/function_call_out_of_order_declaration.ink check
test/function_call_return.ink check
test/functions_with_same_name.ink check
test/function_with_int_return.ink check
test/if_cond_assign.ink check
test/ifdefs.ink check
test/if_def_block.ink check
test/if_def_expression.ink check
test/inferred_types.ink check
test/multiple_functions.ink check
test/multiple_semicolons_everywhere.ink check
test/nested_if.ink check
test/non_bool_cond.ink check
test/pass_and_access_struct_fields_in_functions.ink check
test/passthrough.ink check
test/redeclared_variable.ink check
test/rvalue_binary.ink check
test/simple_else_if.ink check
test/simple_if_else.ink check
test/simple_if.ink check
test/simple_struct_access.ink check
test/struct_access_primitive_type.ink check
test/struct_within_struct.ink check
test/temp_access.ink check
test/type_as_variable_name.ink check
test/unary.ink check
test/undeclared_function.ink check
test/undeclared_symbol.ink check
test/unknown_overload.ink check
test/use_builtin_functions.ink check
test/wrong_argument_count.ink check
test/wrong_multiply.ink check
test/wrong_type_for_function.ink check

View File

@@ -0,0 +1,6 @@
void vs_main()
{
float2 v;
v.x = (2.0f + ((4.0f - 2.0f) * 1.5f)) * 3.0f;
}

View File

@@ -0,0 +1,6 @@
float4 vs_main() : SV_POSITION
{
float4 arr[16];
return arr[0];
}

View File

@@ -1,5 +1,5 @@
void vs_main() void vs_main()
{ {
float x = (2.0f + 5.0f); float x = 2.0f + 5.0f;
} }

View File

@@ -1,9 +1,8 @@
cbuffer __PROPERTIES : register(b0) cbuffer properties : register(b0)
{ {
float4 __PROPERTIES__color; float4 color;
} }
float3 vs_main(float3 pos : POSITION) : SV_POSITION float3 vs_main(float3 pos : POSITION) : SV_POSITION
{ {
return pos; return pos;
@@ -11,6 +10,6 @@ float3 vs_main(float3 pos : POSITION) : SV_POSITION
float4 ps_main() : SV_TARGET float4 ps_main() : SV_TARGET
{ {
return __PROPERTIES__color; return properties.color;
} }

View File

@@ -0,0 +1,28 @@
void vs_main()
{
float2 v2 = float2(2.0f, 2.0f);
v2 = float2(2.0f, 2.0f);
v2 = float2(v2, v2);
float3 v3 = float3(2.0f, 2.0f, 2.0f);
v3 = float3(v2, 1.0f);
v3 = float3(1.0f, v2);
v3 = float3(1.0f, 1.0f, 1.0f);
v3 = float3(v3, v3, v3);
float4 v4 = float4(2.0f, 2.0f, 2.0f, 2.0f);
v4 = float4(v4, v4, v4, v4);
v4 = float4(v2, v2);
v4 = float4(v2, 1.0f, 1.0f);
v4 = float4(1.0f, v2, 1.0f);
v4 = float4(1.0f, 1.0f, v2);
v4 = float4(v3, 2.0f);
v4 = float4(2.0f, v3);
v4 = float4(2.0f, 2.0f, 2.0f, 2.0f);
v4 = float4(1.0f, 1.0f, v2);
v4 = float4(2.0f, 2.0f, 2.0f, 2.0f);
v2.x = 2.0f;
v2.y = 2.0f;
float p = v2.x + v3.z;
float q = v4.w + v2.x;
float4x4 m;
}

View File

@@ -2,6 +2,6 @@ void vs_main()
{ {
float x = 5.0f; float x = 5.0f;
float y = 3000.0f; float y = 3000.0f;
float z = ((y * y) + x); float z = (y * y) + x;
} }

View File

@@ -0,0 +1,13 @@
cbuffer props : register(b0)
{
float4x4 projection;
float4x4 view;
}
float4 vs_main(float4 pos : POSITION) : SV_POSITION
{
float4 mv = mul(props.view, pos);
float4 mvp = mul(props.projection, mv);
return mvp;
}

View File

@@ -0,0 +1,7 @@
void ps_main()
{
float4 alpha_color = float4(1, 0, 0, 1);
float f = 2.0f;
}

View File

@@ -0,0 +1,4 @@
void vs_console_main()
{
}

View File

@@ -0,0 +1,9 @@
void ps_main()
{
}
void vs_skinning_main()
{
float x = 5.0f;
}

View File

@@ -8,7 +8,7 @@ int foo()
float bar() float bar()
{ {
return (1235.0f * 500); return 1235.0f * 500;
} }
void vs_main() void vs_main()

View File

@@ -7,7 +7,7 @@ struct Foo
float foo(Foo f) float foo(Foo f)
{ {
return (f.some_data * 2.0f); return f.some_data * 2.0f;
} }
void vs_main() void vs_main()

View File

@@ -0,0 +1,7 @@
void vs_main()
{
float2 a;
float2 b;
float x = (a + b).x;
}

View File

@@ -1,5 +1,7 @@
test/assign_arithmetic_expression.ink codegen test/assign_arithmetic_expression.ink codegen
test/arithmetic_parens.ink codegen
test/basic_property_and_return_value.ink codegen test/basic_property_and_return_value.ink codegen
test/builtin_types.ink codegen
test/complicated_computation.ink codegen test/complicated_computation.ink codegen
test/constant_buffer.ink codegen test/constant_buffer.ink codegen
test/empty_struct.ink codegen test/empty_struct.ink codegen
@@ -9,14 +11,16 @@ test/field_assignment.ink codegen
test/function_call.ink codegen test/function_call.ink codegen
test/function_call_out_of_order_declaration.ink codegen test/function_call_out_of_order_declaration.ink codegen
test/function_call_return.ink codegen test/function_call_return.ink codegen
test/ifdefs.ink codegen
test/if_def_block.ink codegen
test/if_def_expression.ink codegen
test/inferred_types.ink codegen test/inferred_types.ink codegen
test/meta_block.ink codegen
test/multiple_functions.ink codegen test/multiple_functions.ink codegen
test/multiple_semicolons_everywhere.ink codegen test/multiple_semicolons_everywhere.ink codegen
test/nested_if.ink codegen test/nested_if.ink codegen
test/pass_and_access_struct_fields_in_functions.ink codegen test/pass_and_access_struct_fields_in_functions.ink codegen
test/passthrough.ink codegen test/passthrough.ink codegen
test/property_rename.ink codegen test/rvalue_binary.ink codegen
test/simple_else_if.ink codegen test/simple_else_if.ink codegen
test/simple_if_else.ink codegen test/simple_if_else.ink codegen
test/simple_if.ink codegen test/simple_if.ink codegen

View File

@@ -1,5 +1,7 @@
test/assign_arithmetic_expression.ink compile test/assign_arithmetic_expression.ink compile
test/arithmetic_parens.ink compile
test/basic_property_and_return_value.ink compile test/basic_property_and_return_value.ink compile
test/builtin_types.ink compile
test/complicated_computation.ink compile test/complicated_computation.ink compile
test/empty_struct.ink compile test/empty_struct.ink compile
test/empty_vertex_main.ink compile test/empty_vertex_main.ink compile
@@ -10,12 +12,15 @@ test/function_call.ink compile
test/function_call_out_of_order_declaration.ink compile test/function_call_out_of_order_declaration.ink compile
test/function_call_return.ink compile test/function_call_return.ink compile
test/functions_with_same_name.ink compile test/functions_with_same_name.ink compile
test/ifdefs.ink compile
test/if_def_block.ink compile
test/if_def_expression.ink compile
test/inferred_types.ink compile test/inferred_types.ink compile
test/meta_block.ink compile
test/multiple_functions.ink compile test/multiple_functions.ink compile
test/multiple_semicolons_everywhere.ink compile test/multiple_semicolons_everywhere.ink compile
test/pass_and_access_struct_fields_in_functions.ink compile test/pass_and_access_struct_fields_in_functions.ink compile
test/passthrough.ink compile test/passthrough.ink compile
test/rvalue_binary.ink compile
test/simple_else_if.ink compile test/simple_else_if.ink compile
test/simple_if_else.ink compile test/simple_if_else.ink compile
test/simple_if.ink compile test/simple_if.ink compile

View File

@@ -0,0 +1 @@
[vertex entry point] - vs_main

View File

@@ -0,0 +1 @@
[vertex entry point] - vs_main

Some files were not shown because too many files have changed in this diff Show More