Compare commits

14 Commits

22 changed files with 420 additions and 106 deletions

.gitmodules (vendored): 2 lines changed
View File

@@ -1,3 +1,3 @@
[submodule "modules/nbrutil"]
path = modules/ncore
url = git@git.nbross.com:nielsbross/nbrutil.git
url = git@git.nbross.com:nielsbross/NCore.git

View File

@@ -283,7 +283,6 @@ pretty_print_variable :: (node : *AST_Node, indentation : int, builder : *String
print_to_builder(builder, "%", node.name);
for child : node.children {
print("%\n", child.kind);
if child.kind == .Variable {
append(builder, ".");
pretty_print_variable(child, indentation, builder);

View File

@@ -1,3 +1,12 @@
/////////////////////////////////////
//~ nbr:
//
/////////////////////////////////////
//~ nbr: Codegen TODOs
//
// [ ] Prefix output of property values with __PROPERTIES so we don't get name clashes
Output_Language :: enum {
HLSL;
GLSL; // @Incomplete
@@ -41,13 +50,57 @@ indent :: (state : *Codegen_State, indentation : int) {
for 1..indentation append(*state.builder, " ");
}
dx11_type_to_string :: (type_variable : Type_Variable) -> string {
if type_variable.type == {
case .Invalid;
return "{{invalid}}";
case .Unit;
return "()";
case .Int; {
return "int";
}
case .Half; {
return "half";
}
case .Float; {
return "float";
}
case .Double; {
return "double";
}
case .Sampler; {
return "SamplerState";
}
case .Texture2D; {
return "Texture2D";
}
case .Function; #through;
case .Struct; {
return type_variable.typename;
}
case .Array;
return "array";
}
return "";
}
emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
find_result := find_symbol(state.scope_stack, node.name, state.current_scope);
field := h2tv(state.type_variables, find_result.type_variable);
indent(state, indentation);
print_to_builder(*state.builder, "% ", type_to_string(field));
print_to_builder(*state.builder, "% ", dx11_type_to_string(field));
if field.struct_field_parent {
parent_tv := h2tv(state.type_variables, field.struct_field_parent.type_variable);
if parent_tv.typename == "properties" {
append(*state.builder, "__PROPERTIES__");
}
}
print_to_builder(*state.builder, "%", node.name);
if field.type == .Sampler {
@@ -65,7 +118,7 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_node(state, child, 0);
}
for i :0..field.child_count - 1 {
for i :0..field.children.count - 1 {
child := h2tv(state.type_variables, field.children[i]);
emit_node(state, child.source_node, 0);
}
@@ -74,6 +127,10 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
if hint.ident_value == "position" {
// @Incomplete(nb): Should be a lookup table somewhere
append(*state.builder, " : POSITION");
} else if hint.ident_value == "uv" {
append(*state.builder, " : TEXCOORD0");
} else if hint.ident_value == "outposition" {
append(*state.builder, " : SV_POSITION");
}
}
}
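
The @Incomplete note above asks for a lookup table instead of the growing if/else chain. A minimal sketch of what that table could look like, assuming only the three hints handled so far (the names Hint_Semantic and HINT_SEMANTICS are hypothetical, not part of this change):

Hint_Semantic :: struct {
    ident    : string;
    semantic : string;
}

HINT_SEMANTICS :: Hint_Semantic.[
    .{ "position",    " : POSITION" },
    .{ "uv",          " : TEXCOORD0" },
    .{ "outposition", " : SV_POSITION" },
];

// Usage sketch: scan the table for hint.ident_value and append the matching semantic.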
@@ -97,7 +154,7 @@ emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_node(state, args.children[0], 0);
append(*state.builder, ".");
print_to_builder(*state.builder, "%(", node.name);
print_to_builder(*state.builder, "Sample(");
for i : 1..args.children.count - 1 {
child := args.children[i];
@@ -199,7 +256,7 @@ emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, e
if function_variable.return_type_variable {
return_variable := h2tv(state.type_variables, function_variable.return_type_variable);
print_to_builder(*state.builder, "% ", type_to_string(return_variable));
print_to_builder(*state.builder, "% ", dx11_type_to_string(return_variable));
} else {
append(*state.builder, "void ");
}
@@ -326,6 +383,13 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
is_properties := type_var.typename == "properties";
if !is_properties {
if type_var.struct_field_parent {
parent_tv := h2tv(state.type_variables, type_var.struct_field_parent.type_variable);
if parent_tv.typename == "properties" {
append(*state.builder, "__PROPERTIES__");
}
}
print_to_builder(*state.builder, "%", node.name);
}
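
The properties prefix is now emitted in two places (emit_field above and this emit_node case). A minimal sketch of factoring it into one helper, built only from the calls already used in this diff; emit_mangled_name is a hypothetical name, not part of this change:

emit_mangled_name :: (state : *Codegen_State, field : Type_Variable, name : string) {
    if field.struct_field_parent {
        parent_tv := h2tv(state.type_variables, field.struct_field_parent.type_variable);
        if parent_tv.typename == "properties" {
            append(*state.builder, "__PROPERTIES__");
        }
    }
    print_to_builder(*state.builder, "%", name);
}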
@@ -338,6 +402,11 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
}
case .Binary; {
indent(*state.builder, indentation);
if node.token.kind != .TOKEN_ASSIGN {
append(*state.builder, "(");
}
lhs := node.children[0];
rhs := node.children[1];
emit_node(state, lhs, 0);
@@ -346,6 +415,9 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_operator(state, node.token.kind);
append(*state.builder, " ");
emit_node(state, rhs, 0);
if node.token.kind != .TOKEN_ASSIGN {
append(*state.builder, ")");
}
}
case .Unary; {
assert(false, "Not implemented yet: unary");
@@ -390,7 +462,7 @@ emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_field_list(state, field_list, indentation);
append(*state.builder, "}\n\n");
append(*state.builder, "};\n\n");
state.current_scope = current_scope;
}
@@ -434,20 +506,20 @@ emit_declaration :: (state : *Codegen_State, node : *AST_Node) {
codegen :: (state : *Codegen_State) -> Codegen_Result {
found_function : bool = false;
found_struct : bool = false;
// found_struct : bool = false;
for variable : state.type_variables {
if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
if variable.source_node.kind == .Properties continue;
if variable.source_node.kind == .Meta continue;
print_to_builder(*state.builder, "struct %;\n", variable.source_node.name);
found_struct = true;
}
}
// for variable : state.type_variables {
// if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
// if variable.source_node.kind == .Properties continue;
// if variable.source_node.kind == .Meta continue;
// print_to_builder(*state.builder, "struct %;\n", variable.source_node.name);
// found_struct = true;
// }
// }
if found_struct {
append(*state.builder, "\n");
}
// if found_struct {
// append(*state.builder, "\n");
// }
for variable : state.type_variables {
if variable.type == .Function && !variable.builtin

View File

@@ -500,6 +500,25 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
}
lex :: (result : *Compile_Result) {
if result.had_error {
return;
}
for file : result.files {
lexer : Lexer;
init_lexer_from_string(*lexer, file.file.source);
token : *Token = scan_next_token(*lexer);
while token && token.kind != .TOKEN_EOF {
token = scan_next_token(*lexer);
}
// @Incomplete(nb): Temporary until we figure out a good way of passing this stuff around
copy_messages(lexer.result.messages, *result.messages);
}
}
lex :: (lexer : *Lexer, allocator : Allocator = context.allocator) -> Lexing_Result {
lexer.result.tokens.allocator = allocator;
token : *Token = scan_next_token(lexer);

View File

@@ -14,7 +14,12 @@ Parse_State :: struct {
tokens : [..]Token;
current_token_index : int;
allocator : Allocator;
node_allocator : Allocator;
node_arena : Arena;
child_allocator : Allocator;
child_arena : Arena;
had_error : bool;
@@ -119,11 +124,12 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
return rules;
}
init_parse_state :: (parse_state : *Parse_State, tokens : [..]Token, path : string, allocator : Allocator) {
init_parse_state :: (parse_state : *Parse_State, tokens : [..]Token, path : string) {
parse_state.tokens = tokens;
parse_state.path = path;
parse_state.allocator = allocator;
parse_state.result.nodes.allocator = parse_state.allocator;
parse_state.node_allocator = make_arena(*parse_state.node_arena);
parse_state.child_allocator = make_arena(*parse_state.child_arena);
parse_state.result.nodes.allocator = parse_state.node_allocator;
array_reserve(*parse_state.result.nodes, 4096);
parse_state.current_token_index = 0;
}
@@ -274,6 +280,7 @@ make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
node : AST_Node;
node.kind = kind;
node.children.allocator = parse_state.child_allocator;
array_add(*parse_state.result.nodes, node);
return *parse_state.result.nodes[parse_state.result.nodes.count - 1];
@@ -751,6 +758,7 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
block :: (parse_state : *Parse_State) -> *AST_Node {
node : *AST_Node = make_node(parse_state, .Block);
array_reserve(*node.children, 1024);
source_location : Source_Range;
@@ -1051,6 +1059,33 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
return decl_node;
}
parse :: (result : *Compile_Result) {
for *file : result.files {
parse_state : Parse_State;
init_parse_state(*parse_state, file.tokens.tokens, file.file.path);
advance(*parse_state);
if !match(*parse_state, .TOKEN_EOF) {
parse_state.result.root = make_node(*parse_state, .Program);
array_reserve(*parse_state.result.root.children, 1024);
program := parse_state.result.root;
while !check(*parse_state, .TOKEN_EOF) {
decl := declaration(*parse_state);
if decl {
add_child(program, decl);
}
}
}
//@Incomplete(nb): will this straight copy just work?
// Might need to rethink how we do this.
file.ast_root = parse_state.result.root;
file.ast_nodes = parse_state.result.nodes;
copy_messages(parse_state.result.messages, *result.messages);
}
}
parse :: (parse_state : *Parse_State) -> Parse_Result {
advance(parse_state);

View File

@@ -5,7 +5,8 @@
/////////////////////////////////////
//~ nbr: Error reporting TODOs
//
// [x] Improve error reporting on mismatched overloads when types don't match, but arity does
// [ ] Add an error for using keywords as names, or rename the dx11 keywords in the resulting hlsl shader.
// [x] Improve error reporting on mismatched overloads when types don't match, but arity does.
// [x] Improve error reporting for type mismatches in general. It seems like the expect node is not always correct.
VERTEX_MAIN_FUNCTION_PREFIX :: "vertex";
@@ -70,9 +71,8 @@ Type_Variable :: struct {
typename : string;
is_array : bool;
MAX_TYPE_VARIABLE_CHILDREN :: 16;
children : [MAX_TYPE_VARIABLE_CHILDREN]Type_Variable_Handle;
child_count : int;
MAX_TYPE_VARIABLE_CHILDREN :: 32;
children : Static_Array(Type_Variable_Handle, MAX_TYPE_VARIABLE_CHILDREN);
//@Note(niels): For constant buffers
resource_index : u32;
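
The fixed children array plus separate child_count is replaced by a Static_Array here. A minimal sketch of the shape such a type could have, based only on the operations used in this diff (count, indexing, array_add); the project's actual Static_Array module may well differ:

Static_Array :: struct (T : Type, N : int) {
    data  : [N]T;
    count : int;
}

array_add :: (arr : *Static_Array($T, $N), item : T) {
    assert(arr.count < N);
    arr.data[arr.count] = item;
    arr.count += 1;
}

operator [] :: (arr : Static_Array($T, $N), index : int) -> T {
    return arr.data[index];
}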
@@ -126,6 +126,7 @@ Type_Constraint :: struct {
Scope_Stack :: struct {
allocator : Allocator;
arena : Arena;
stack : [..]Scope;
}
@@ -166,8 +167,10 @@ Scope :: struct {
Scope_Handle :: #type, distinct u32;
Semantic_Check_Result :: struct {
messages : [..]Compiler_Message;
had_error : bool;
messages : [..]Compiler_Message;
message_arena : Arena;
message_allocator : Allocator;
had_error : bool;
vertex_entry_point : *AST_Node;
pixel_entry_point : *AST_Node;
@@ -176,6 +179,8 @@ Semantic_Check_Result :: struct {
scope_stack : Scope_Stack;
type_variables : [..]Type_Variable;
type_var_arena : Arena;
type_var_allocator : Allocator;
property_name : string;
}
@@ -356,10 +361,9 @@ no_matching_overload_found :: (checker : *Semantic_Checker, call : *AST_Node, ov
func_var := h2tv(checker, func.type_variable);
if arg_list.children.count != func_var.child_count {
print_to_builder(*builder, "Not enough arguments: Wanted %, got %.\n\n", func_var.child_count, arg_list.children.count);
if arg_list.children.count != func_var.children.count {
print_to_builder(*builder, "Not enough arguments: Wanted %, got %.\n\n", func_var.children.count, arg_list.children.count);
}
}
}
@@ -538,7 +542,7 @@ type_mismatch :: (checker : *Semantic_Checker, usage_site : *AST_Node, expect_no
if got_var.builtin {
print_to_builder(*builder, "% :: (", got_var.name);
for i: 0..got_var.child_count - 1{
for i: 0..got_var.children.count - 1{
child_handle := got_var.children[i];
child := h2tv(checker, child_handle);
@@ -600,7 +604,7 @@ record_error :: (checker : *Semantic_Checker, error_string : string, locations :
error.message = error_string;
checker.result.had_error = true;
array_add(*checker.result.messages, error);
array_add(*checker.result.messages, error);
}
is_proper :: (var : Type_Variable) -> bool {
@@ -636,6 +640,8 @@ push_scope :: (checker : *Semantic_Checker, name := "", kind : Scope_Kind = .Glo
scope.builtin = true;
}
scope.children.allocator = checker.result.scope_stack.allocator;
if checker.current_scope {
scope := get_current_scope(checker);
array_add(*scope.children, xx count);
@@ -709,16 +715,16 @@ new_type_variable :: (checker : *Semantic_Checker) -> *Type_Variable, Type_Varia
}
add_child :: (variable : *Type_Variable, child : Type_Variable_Handle) {
assert(variable.child_count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
variable.children[variable.child_count] = child;
variable.child_count += 1;
assert(variable.children.count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
array_add(*variable.children, child);
// variable.children[variable.children.count] = child;
// variable.children.count += 1;
}
add_child :: (checker : *Semantic_Checker, handle : Type_Variable_Handle, child : Type_Variable_Handle) {
variable := h2tv(checker, handle);
assert(variable.child_count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
variable.children[variable.child_count] = child;
variable.child_count += 1;
assert(variable.children.count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
array_add(*variable.children, child);
}
init_semantic_checker :: (checker : *Semantic_Checker, root : *AST_Node, path : string) {
@@ -726,7 +732,10 @@ init_semantic_checker :: (checker : *Semantic_Checker, root : *AST_Node, path :
checker.path = path;
// @Incomplete(niels): Use other allocator and/or add static array with convenience functions
checker.result.type_var_allocator = make_arena(*checker.result.type_var_arena);
array_reserve(*checker.result.type_variables, 2048);
checker.result.scope_stack.allocator = make_arena(*checker.result.scope_stack.arena);
array_reserve(*checker.result.scope_stack.stack, 256);
global_scope, global_handle := push_scope(checker, kind = .Global);
@@ -1018,7 +1027,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
}
all_same : bool = true;
for i : 0..func_var.child_count - 1 {
for i : 0..func_var.children.count - 1 {
arg := func_var.children[i];
node_child := field_list.children[i];
@@ -1118,7 +1127,7 @@ create_function_constraint :: (checker : *Semantic_Checker, node : *AST_Node) {
constraint.kind = .Function_Decl;
constraint.function.symbol_variable = function.type_variable;
for i : 0..variable.child_count - 1 {
for i : 0..variable.children.count - 1 {
arg_var := variable.children[i];
if arg_var > 0 {
@@ -1260,7 +1269,6 @@ create_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable
size_var := check_node(checker, size_node);
if h2tv(checker, size_var).type != .Int {
//@Incomplete(niels): Report a proper type mismatch here, with an "integral type required" message.
print("Shiet\n");
}
}
@@ -1369,11 +1377,11 @@ create_call_constraint :: (checker : *Semantic_Checker, node : *AST_Node, type_v
function := h2tv(checker, func.type_variable);
if arg_count != function.child_count {
if arg_count != function.children.count {
continue;
}
if node.children.count == 0 && function.child_count == 0 {
if node.children.count == 0 && function.children.count == 0 {
overload_found = true;
break;
}
@@ -1383,7 +1391,7 @@ create_call_constraint :: (checker : *Semantic_Checker, node : *AST_Node, type_v
for arg : arg_vars {
function_param := function.children[it_index];
if !types_compatible(checker, arg.var, function_param) {
if !types_compatible(checker, arg.var, function_param, true) {
if all_args_match {
arg_node = arg.node;
}
@@ -1446,6 +1454,7 @@ check_node :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_H
variable, handle := new_type_variable(checker);
lhs_type := h2tv(checker, lhs_var);
variable.type = lhs_type.type;
variable.typename = lhs_type.typename;
variable.scope = lhs_type.scope;
variable.source_node = node;
node.type_variable = handle;
@@ -1460,7 +1469,9 @@ check_node :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_H
create_equivalence_constraint(checker, rhs_var, lhs_var, node);
proper_variable, rhs_handle := new_type_variable(checker);
proper_variable.type = h2tv(checker, lhs_var).type;
lhs_type_var := h2tv(checker, lhs_var);
proper_variable.type = lhs_type_var.type;
proper_variable.typename = lhs_type_var.typename;
proper_variable.source_node = h2tv(checker, lhs_var).source_node;
proper_variable.struct_field_parent = h2tv(checker, lhs_var).struct_field_parent;
@@ -1560,7 +1571,7 @@ Unification_Result :: enum {
Unification_Failure;
}
types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rhs : Type_Variable_Handle) -> bool {
types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rhs : Type_Variable_Handle, param_matching : bool = false) -> bool {
lhs_var := h2tv(checker, lhs);
rhs_var := h2tv(checker, rhs);
@@ -1569,6 +1580,17 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
case .Half; #through;
case .Float; #through;
case .Double; {
if !param_matching {
if rhs_var.type == .Struct {
if rhs_var.typename == {
case "float2"; #through;
case "float3"; #through;
case "float4"; {
return true;
}
}
}
}
return rhs_var.type == .Int || rhs_var.type == .Half ||
rhs_var.type == .Float || rhs_var.type == .Double;
}
@@ -1584,6 +1606,16 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
lhs_node := lhs_var.source_node;
rhs_node := rhs_var.source_node;
if rhs_var.type != .Struct {
if lhs_var.typename == {
case "float2"; #through;
case "float3"; #through;
case "float4"; {
return rhs_var.type == .Int || rhs_var.type == .Half || rhs_var.type == .Double || rhs_var.type == .Float;
}
}
}
lhs_struct := find_symbol(checker, lhs_var.typename, xx 1);
rhs_struct := find_symbol(checker, rhs_var.typename, xx 1);
@@ -1598,14 +1630,16 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
lhs_struct_var := h2tv(checker, lhs_struct.type_variable);
rhs_struct_var := h2tv(checker, rhs_struct.type_variable);
if lhs_struct_var.child_count != rhs_struct_var.child_count {
if lhs_struct_var.children.count != rhs_struct_var.children.count {
return false;
}
for i : 0..lhs_struct_var.child_count - 1 {
for i : 0..lhs_struct_var.children.count - 1 {
lhs_child := lhs_struct_var.children[i];
rhs_child := rhs_struct_var.children[i];
if !types_compatible(checker, lhs_child, rhs_child) return false;
if !types_compatible(checker, lhs_child, rhs_child) {
return false;
}
}
return true;
@@ -1738,7 +1772,7 @@ add_hlsl_builtins :: (checker : *Semantic_Checker) {
}
parse_state : Parse_State;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -1802,6 +1836,7 @@ check :: (checker : *Semantic_Checker) -> Semantic_Check_Result {
// ===========================================================
// Pretty printing
#scope_file
type_to_string :: (type_variable : Type_Variable) -> string {
if type_variable.type == {
@@ -1818,14 +1853,18 @@ type_to_string :: (type_variable : Type_Variable) -> string {
return Typenames[type_variable.type];
}
case .Function; #through;
case .Struct;
return type_variable.typename;
case .Struct; {
return type_variable.typename;
}
case .Array;
return "array";
}
return "";
}
#scope_export
print_key :: (checker : *Semantic_Checker, builder : *String_Builder, name : string) {
scope := get_current_scope(checker);
target_length := scope.longest_key_length + 1;
@@ -1877,14 +1916,14 @@ pretty_print_struct :: (checker : *Semantic_Checker, builder : *String_Builder,
print_key(checker, builder, name);
append(builder, "{");
for 0..struct_type.child_count - 1 {
for 0..struct_type.children.count - 1 {
child_handle := struct_type.children[it];
child := h2tv(checker, child_handle);
print_to_builder(builder, child.name);
append(builder, " : ");
print_to_builder(builder, type_to_string(child));
if it < struct_type.child_count - 1 {
if it < struct_type.children.count - 1 {
append(builder, ", ");
}
}
@@ -2148,14 +2187,14 @@ pretty_print_type_variable :: (checker : *Semantic_Checker, type_variable : *Typ
if type_variable.kind == .Declaration {
append(builder, "{");
for 0..type_variable.child_count - 1 {
for 0..type_variable.children.count - 1 {
child_handle := type_variable.children[it];
child := h2tv(checker, child_handle);
print_to_builder(builder, child.name);
append(builder, " : ");
print_to_builder(builder, type_to_string(child));
if it < type_variable.child_count - 1 {
if it < type_variable.children.count - 1 {
append(builder, ", ");
}
}

View File

@@ -216,7 +216,7 @@ run_parser_test :: (lexer : *Lexer, output_type : Output_Type = 0) -> Result, *A
result_data : Result;
result_data.path = lexer.path;
result_data.stage = .Parser;
init_parse_state(*parse_state, lexer.result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lexer.result.tokens, lexer.path);
result := parse(*parse_state);
result_node : *AST_Node;
@@ -272,7 +272,7 @@ run_semantic_analysis_test :: (file_path : string, output_type : Output_Type = 0
parse_state : Parse_State;
result_data.stage = .Parser;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -310,8 +310,8 @@ run_semantic_analysis_test :: (file_path : string, root : *AST_Node, output_type
result_text = pretty_print_symbol_table(*checker, temp);
constraints := pretty_print_type_constraints(*checker, temp);
type_vars := pretty_print_type_variables(*checker, temp);
print("Constraints\n%\n", constraints);
print("Solution\n%\n", type_vars);
// print("Constraints\n%\n", constraints);
// print("Solution\n%\n", type_vars);
}
if output_type & .StdOut {
@@ -398,7 +398,7 @@ run_codegen_test :: (path : string, output_type : Output_Type = 0) -> Result, Co
parse_state : Parse_State;
result_data.stage = .Parser;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -477,7 +477,23 @@ run_test :: (file_path : string, stage_flags : Stage_Flags, results : *[..]Resul
}
run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_Type = 0) {
print("%Running test: %\n", cyan(), test_case.path);
print("%Running test: %......", cyan(), test_case.path);
// Right-pad the path so every result lines up in the same column (target width 50).
len := 50;
rest := len - test_case.path.count;
for i: 0..rest {
print(" ");
}
run_test(test_case.path, test_case.stage_flags, results, output_type);
}
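
A minimal sketch of the same padding without the extra space from the inclusive 0..rest range, clamped when the path is longer than the target width (both assumptions about intent, not part of this change):

pad := 50 - test_case.path.count;
if pad < 0  pad = 0;
for 1..pad  print(" ");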
@@ -525,7 +541,7 @@ run_test_suite :: (using suite : *Test_Suite, output_type : Output_Type = 0) {
}
}
print("\n");
// print("\n");
}
print("\n");
@@ -620,26 +636,26 @@ evaluate_result :: (result : Result) {
if #complete result.type == {
case .File_Read_Failed; {
print("%", red());
print("% failed with File_Read_Failed\n", result.path);
print(" %", red());
print("failed with File_Read_Failed\n");
}
case .Golden_File_Not_Found; {
print("%", red());
print("% failed with Golden File Not Found for stage %\n", result.path, stage);
print(" %", red());
print("failed with Golden File Not Found for stage %\n", stage);
}
case .StdOut; {
}
case .Golden_Output; {
print("%", yellow());
print("% output new golden file at %\n", result.path, result.golden_path);
print(" %", yellow());
print("output new golden file at %\n", result.golden_path);
}
case .Passed; {
print("%", green());
print("% passed %\n", result.path, stage);
print(" %", green());
print("passed %\n", stage);
}
case .Failed; {
print("%", red());
print("% failed %\n", result.path, stage);
print(" %", red());
print("failed %\n", stage);
}
}

View File

@@ -259,4 +259,4 @@ int4x4 :: struct {
#foreign atan2 :: (float4, float4) -> float4;
#foreign atan2 :: (float4x4, float4x4) -> float4x4;
#foreign sample :: (Texture2D, float2, Sampler) -> float4;
#foreign sample :: (Texture2D, Sampler, float2) -> float4;

View File

@@ -56,6 +56,7 @@ Hint_Kind :: enum {
None;
Position;
UV;
Target;
Custom;
@@ -131,6 +132,68 @@ Shader_Variant_Collection :: struct {
variants : [..]Shader_Variant;
}
Input_File :: struct {
source : string;
path : string;
}
Token_Stream :: struct {
tokens : [..]Token;
}
Compiled_File :: struct {
file : Input_File;
tokens : Token_Stream;
ast_root : *AST_Node;
ast_nodes : [..]AST_Node;
}
Compile_Result :: struct {
files : [..]Compiled_File;
had_error : bool;
messages : [..]Compiler_Message;
allocator : Allocator;
arena : Arena;
}
//@Incomplete(niels): need to consider allocation
add_file :: (result : *Compile_Result, path : string) {
file_string, ok := read_entire_file(path);
if !ok {
// record_error(.File_Load_Failed, "Unable to load file: %", path);
return;
}
input_file : Input_File;
input_file.source = file_string;
input_file.path = path;
compiled_file : Compiled_File;
compiled_file.file = input_file;
array_add(*result.files, compiled_file);
}
// @Incomplete(nb): Will we ever even use this?
from_file :: (path : string) -> Compile_Result {
arr : [1]string;
arr[0] = path;
return from_files(arr);
}
from_files :: (paths : []string) -> Compile_Result {
result : Compile_Result;
for path : paths {
add_file(*result, path);
}
return result;
}
Compilation_Result :: struct {
messages : [..]Compiler_Message;
@@ -257,7 +320,7 @@ type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variabl
type_var := h2tv(checker, find_result.type_variable);
for i : 0..type_var.child_count - 1 {
for i : 0..type_var.children.count - 1 {
child := type_var.children[i];
child_field := type_variable_to_field(checker, h2tv(checker, child));
array_add(*type.children, child_field);
@@ -273,6 +336,8 @@ type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variabl
if hint.ident_value == "position" {
// @Incomplete(nb): Should be a lookup table somewhere
field_hint.kind = .Position;
} else if hint.ident_value == "uv" {
field_hint.kind = .UV;
} else if starts_with(hint.ident_value, "target") {
// @Incomplete(nb): Should be a lookup table somewhere
index_str : string;
@@ -295,6 +360,21 @@ type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variabl
return field;
}
compile_file :: (compiler : *Shader_Compiler, paths : []string) -> Compile_Result {
result : Compile_Result;
for path : paths {
add_file(*result, path);
}
lex(*result);
// parse(*result);
// check(*result);
// codegen(*result);
return result;
}
compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Result {
result : Compilation_Result;
@@ -315,7 +395,7 @@ compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Resu
}
parse_state : Parse_State;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -376,7 +456,7 @@ compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Resu
cb := array_add(*result.collection.cbuffers);
for i : 0..variable.child_count - 1 {
for i : 0..variable.children.count - 1 {
child := variable.children[i];
field : Property_Field;
field.base_field = type_variable_to_field(*checker, h2tv(*checker, child));
@@ -390,7 +470,7 @@ compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Resu
if find_result {
property_variable := h2tv(check_result.type_variables, find_result.type_variable);
for i : 0..property_variable.child_count - 1 {
for i : 0..property_variable.children.count - 1 {
child := property_variable.children[i];
field := type_variable_to_field(*checker, h2tv(*checker, child));
prop_field : Property_Field;
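
A minimal sketch of driving the new Compile_Result pipeline with the entry points introduced above; only lexing is wired up so far, and the later stages are still commented out in compile_file. The test paths are just examples:

paths : [2]string;
paths[0] = "test/double_access.shd";
paths[1] = "test/large_block.shd";
result := from_files(paths);
lex(*result);
if result.had_error {
    // Inspect result.messages for lexer diagnostics.
}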

View File

@@ -1,5 +1,5 @@
void vs_main()
{
float x = 2.0f + 5.0f;
float x = (2.0f + 5.0f);
}

View File

@@ -1,8 +1,9 @@
cbuffer __PROPERTIES : register(b0)
{
float4 color;
float4 __PROPERTIES__color;
}
float3 vs_main(float3 pos : POSITION) : SV_POSITION
{
return pos;
@@ -10,6 +11,6 @@ float3 vs_main(float3 pos : POSITION) : SV_POSITION
float4 ps_main() : SV_TARGET
{
return color;
return __PROPERTIES__color;
}

View File

@@ -2,6 +2,6 @@ void vs_main()
{
float x = 5.0f;
float y = 3000.0f;
float z = y * y + x;
float z = ((y * y) + x);
}

View File

@@ -1,4 +1,2 @@
struct Foo;
struct Foo {}
struct Foo {};

View File

@@ -1,15 +1,16 @@
cbuffer __PROPERTIES : register(b0)
{
float4 color;
float4 __PROPERTIES__color;
}
float3 vs_main(float3 pos : POSITION, float2 uv) : SV_POSITION
float3 vs_main(float3 pos : POSITION, float2 uv : TEXCOORD0) : SV_POSITION
{
return pos;
}
float4 ps_main() : SV_TARGET
{
return color;
return __PROPERTIES__color;
}

View File

@@ -8,7 +8,7 @@ int foo()
float bar()
{
return 1235.0f * 500;
return (1235.0f * 500);
}
void vs_main()

View File

@@ -1,15 +1,13 @@
struct Foo;
float foo(Foo f);
struct Foo
{
float some_data;
}
};
float foo(Foo f)
{
return f.some_data * 2.0f;
return (f.some_data * 2.0f);
}
void vs_main()

View File

@@ -1,9 +1,7 @@
struct Data;
struct Data
{
float4 color;
}
};
void vs_main()
{

View File

@@ -1,15 +1,12 @@
struct Foo;
struct Bar;
struct Foo
{
float4 color;
}
};
struct Bar
{
Foo t;
}
};
void vs_main()
{

test/double_access.shd (new file): 7 lines added
View File

@@ -0,0 +1,7 @@
p :: properties {
v : float2;
}
vertex main ::() {
x : float = p.v.x / p.v.y;
}

test/large_block.shd (new file): 42 lines added
View File

@@ -0,0 +1,42 @@
p :: properties {
color : float4;
rect_position : float2;
rect_scale : float2;
resolution : float2;
texture : Texture2D;
sampler : Sampler;
}
PS_Input :: struct {
uv : float2 @uv;
pos : float4 @pos;
}
vertex main :: (pos : float4 @position) -> PS_Input {
res : float2 = p.resolution;
scale : float2 = p.rect_scale;
rect_pos : float2 = p.rect_position;
center : float2 = rect_pos;
half_size : float2 = float2(scale.x / 2, scale.y / 2);
dst_pos : float4 = float4(pos.x * half_size.x + center.x, pos.y * half_size.y + center.y, 0.0, 1.0);
result : PS_Input;
src_p0 : float2 = float2(0.0, 1.0);
src_p1 : float2 = float2(1.0, 0.0);
src_half_size : float2 = (src_p1 - src_p0) / 2;
src_center : float2 = (src_p1 + src_p0) / 2;
src_pos : float2 = float2(pos.x, pos.y) * src_half_size + src_center;
result.uv = float2(1, 1);
result.pos = float4(2.0 * dst_pos.x / res.x - 1, 2.0 * dst_pos.y / res.y - 1, 0.0, 1.0);
return result;
}
pixel main :: (input : PS_Input) -> float4 @target0 {
color : float4 = p.color;
return color;
}

View File

@@ -6,8 +6,6 @@ test/empty_struct.shd semant
test/empty_vertex_main.shd semant
test/empty_vertex_main_with_position_parameter.shd semant
test/field_assignment.shd semant
test/field_without_type_specifier.shd semant
test/float_suffix.shd semant
test/function_call.shd semant
test/function_call_out_of_order_declaration.shd semant
test/function_call_return.shd semant

View File

@@ -0,0 +1,14 @@
vertex main :: (pos : float4 @position) -> float4 @position {
src_p0 : float2 = float2(0.0, 1.0);
src_p1 : float2 = float2(1.0, 0.0);
src_half_size : float2 = (src_p1 - src_p0) / 2;
src_center : float2 = (src_p1 + src_p0) / 2;
src_pos : float2 = float2(pos.x, pos.y) * src_half_size + src_center;
return float4(1, 1, 1, 1);
}
pixel main :: () -> float4 @target0 {
return float4(1, 1, 1, 1);
}