Compare commits

22 Commits

SHA1 Message Date
90fb1a035e Add Properties prefix. Fix semantic check regression. 2024-09-26 16:47:34 +02:00
e365067354 Changed type var children to static array. 2024-09-25 13:01:56 +02:00
243d83663a Simplified shader test output 2024-09-20 13:00:17 +02:00
3f93e1a92d Add double access shader test 2024-09-20 12:48:47 +02:00
fca325b761 Deleted some stuff 2024-09-12 07:18:27 +02:00
d3aa4fffeb Quick fix for binary expr codegen. Add uv hint 2024-09-11 13:21:03 +02:00
6eba51cc8c Fix position output hint. 2024-09-11 12:42:59 +02:00
5b237d34de Merge branch 'dev' of git.nbross.com:nielsbross/Shader-Compiler into dev 2024-09-11 07:25:21 +02:00
ff668b6c95 Fix type checker not promoting ints. Fix some issues in the codegen. 2024-09-11 07:24:50 +02:00
c84516d39f Fix type checker not promoting ints. Fix some issues in the codegen. 2024-09-11 07:24:10 +02:00
517209c886 A bunch of allocator stuff 2024-09-10 07:21:27 +02:00
d01fca146c Opened pandoras box 2024-09-09 22:32:36 +02:00
d9dfcc6354 Beginning of better API 2024-09-02 12:34:48 +02:00
c8cd15456d Update ncore reference 2024-09-02 12:34:40 +02:00
c225217676 NCore update 2024-08-12 07:17:41 +02:00
b2ee560145 Fix build.bat file. 2024-07-18 23:13:01 +02:00
b475357cf9 Fix ncore import 2024-07-17 16:24:33 +02:00
76994b2567 Rename nbrutil to ncore 2024-07-17 16:21:09 +02:00
0471dbe7d7 Add nbrutil to modules. Move static array to nbrutil 2024-07-16 14:31:54 +02:00
c5758bd023 Add array parsing. Missing rest of pipeline. 2024-07-13 11:46:14 +02:00
ae54f1374e Merge branch 'dev' of git.nbross.com:nielsbross/Shader-Compiler into dev 2024-07-02 22:52:06 +02:00
22b70f88b6 Fix builtin parsing 2024-07-02 22:51:55 +02:00
31 changed files with 647 additions and 162 deletions

.gitmodules vendored Normal file (3 lines added)

@@ -0,0 +1,3 @@
[submodule "modules/nbrutil"]
path = modules/ncore
url = git@git.nbross.com:nielsbross/NCore.git
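Note that the new .gitmodules entry still carries the old submodule name "modules/nbrutil" while its path points at modules/ncore, which lines up with the rename history in this range (0471dbe7d7 added nbrutil as a submodule, 76994b2567 renamed it to ncore). For reference, an entry like this is typically produced by something along the lines of `git submodule add git@git.nbross.com:nielsbross/NCore.git modules/ncore`; the exact command used here is an assumption, only the URL and path are taken from the diff.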

AST.jai (19 lines changed)

@@ -54,6 +54,8 @@ AST_Node :: struct {
token : Token;
array_field : bool;
source_location : Source_Range;
type_variable : Type_Variable_Handle;
@@ -105,7 +107,15 @@ pretty_print_field :: (node : *AST_Node, indentation : int, builder : *String_Bu
print_to_builder(builder, tprint("(:= %", node.name));
if node.kind != .Unnamed_Field && node.token.ident_value.count > 0 {
print_to_builder(builder, tprint(" %", node.token.ident_value));
if node.array_field {
append(builder, " [");
pretty_print_node(node.children[0], 0, builder);
append(builder, "].");
print_to_builder(builder, "%", node.token.ident_value);
} else {
print_to_builder(builder, " %", node.token.ident_value);
}
}
for hint : node.hint_tokens {
@@ -114,7 +124,7 @@ pretty_print_field :: (node : *AST_Node, indentation : int, builder : *String_Bu
}
}
if node.children.count > 0 {
if !node.array_field && node.children.count > 0 {
append(builder, " ");
pretty_print_children(node, indentation, builder);
}
@@ -271,10 +281,15 @@ pretty_print_node :: (node : *AST_Node, indentation : int, builder : *String_Bui
pretty_print_variable :: (node : *AST_Node, indentation : int, builder : *String_Builder) {
indent(builder, indentation);
print_to_builder(builder, "%", node.name);
for child : node.children {
if child.kind == .Variable {
append(builder, ".");
pretty_print_variable(child, indentation, builder);
} else if child.kind == .Unary {
append(builder, "[");
pretty_print_node(child.children[0], 0, builder);
append(builder, "]");
}
}
}

View File

@@ -1,3 +1,12 @@
/////////////////////////////////////
//~ nbr:
//
/////////////////////////////////////
//~ nbr: Codegen TODOs
//
// [ ] Prefix output of property values with __PROPERTIES so we don't get name clashes
Output_Language :: enum {
HLSL;
GLSL; // @Incomplete
@@ -41,13 +50,57 @@ indent :: (state : *Codegen_State, indentation : int) {
for 1..indentation append(*state.builder, " ");
}
dx11_type_to_string :: (type_variable : Type_Variable) -> string {
if type_variable.type == {
case .Invalid;
return "{{invalid}}";
case .Unit;
return "()";
case .Int; {
return "int";
}
case .Half; {
return "half";
}
case .Float; {
return "float";
}
case .Double; {
return "double";
}
case .Sampler; {
return "SamplerState";
}
case .Texture2D; {
return "Texture2D";
}
case .Function; #through;
case .Struct; {
return type_variable.typename;
}
case .Array;
return "array";
}
return "";
}
emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
find_result := find_symbol(state.scope_stack, node.name, state.current_scope);
field := h2tv(state.type_variables, find_result.type_variable);
indent(state, indentation);
print_to_builder(*state.builder, "% ", type_to_string(field));
print_to_builder(*state.builder, "% ", dx11_type_to_string(field));
if field.struct_field_parent {
parent_tv := h2tv(state.type_variables, field.struct_field_parent.type_variable);
if parent_tv.typename == "properties" {
append(*state.builder, "__PROPERTIES__");
}
}
print_to_builder(*state.builder, "%", node.name);
if field.type == .Sampler {
@@ -65,7 +118,7 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_node(state, child, 0);
}
for i :0..field.child_count - 1 {
for i :0..field.children.count - 1 {
child := h2tv(state.type_variables, field.children[i]);
emit_node(state, child.source_node, 0);
}
@@ -74,6 +127,10 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
if hint.ident_value == "position" {
// @Incomplete(nb): Should be a lookup table somewhere
append(*state.builder, " : POSITION");
} else if hint.ident_value == "uv" {
append(*state.builder, " : TEXCOORD0");
} else if hint.ident_value == "outposition" {
append(*state.builder, " : SV_POSITION");
}
}
}
@@ -97,7 +154,7 @@ emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_node(state, args.children[0], 0);
append(*state.builder, ".");
print_to_builder(*state.builder, "%(", node.name);
print_to_builder(*state.builder, "Sample(");
for i : 1..args.children.count - 1 {
child := args.children[i];
@@ -199,7 +256,7 @@ emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, e
if function_variable.return_type_variable {
return_variable := h2tv(state.type_variables, function_variable.return_type_variable);
print_to_builder(*state.builder, "% ", type_to_string(return_variable));
print_to_builder(*state.builder, "% ", dx11_type_to_string(return_variable));
} else {
append(*state.builder, "void ");
}
@@ -326,6 +383,13 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
is_properties := type_var.typename == "properties";
if !is_properties {
if type_var.struct_field_parent {
parent_tv := h2tv(state.type_variables, type_var.struct_field_parent.type_variable);
if parent_tv.typename == "properties" {
append(*state.builder, "__PROPERTIES__");
}
}
print_to_builder(*state.builder, "%", node.name);
}
@@ -338,6 +402,11 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
}
case .Binary; {
indent(*state.builder, indentation);
if node.token.kind != .TOKEN_ASSIGN {
append(*state.builder, "(");
}
lhs := node.children[0];
rhs := node.children[1];
emit_node(state, lhs, 0);
@@ -346,6 +415,9 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_operator(state, node.token.kind);
append(*state.builder, " ");
emit_node(state, rhs, 0);
if node.token.kind != .TOKEN_ASSIGN {
append(*state.builder, ")");
}
}
case .Unary; {
assert(false, "Not implemented yet: unary");
@@ -390,7 +462,7 @@ emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
emit_field_list(state, field_list, indentation);
append(*state.builder, "}\n\n");
append(*state.builder, "};\n\n");
state.current_scope = current_scope;
}
@@ -434,20 +506,20 @@ emit_declaration :: (state : *Codegen_State, node : *AST_Node) {
codegen :: (state : *Codegen_State) -> Codegen_Result {
found_function : bool = false;
found_struct : bool = false;
// found_struct : bool = false;
for variable : state.type_variables {
if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
if variable.source_node.kind == .Properties continue;
if variable.source_node.kind == .Meta continue;
print_to_builder(*state.builder, "struct %;\n", variable.source_node.name);
found_struct = true;
}
}
// for variable : state.type_variables {
// if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
// if variable.source_node.kind == .Properties continue;
// if variable.source_node.kind == .Meta continue;
// print_to_builder(*state.builder, "struct %;\n", variable.source_node.name);
// found_struct = true;
// }
// }
if found_struct {
append(*state.builder, "\n");
}
// if found_struct {
// append(*state.builder, "\n");
// }
for variable : state.type_variables {
if variable.type == .Function && !variable.builtin
@@ -477,3 +549,6 @@ codegen :: (ast_root : *AST_Node, checker_result : Semantic_Check_Result, output
init_codegen_state(*codegen_state, ast_root, checker_result, output_language);
return codegen(*codegen_state);
}
#scope_module
#import "ncore";

View File

@@ -122,7 +122,10 @@ report_message :: (builder : *String_Builder, path : string, message : string, s
} else {
append(builder, "internal:");
}
if source_locations.count > 0 {
print_to_builder(builder, "%,%: ", source_locations[0].main_token.line, source_locations[0].main_token.column);
}
if kind == .Log {
append(builder, "\x1b[31mlog: ");

View File

@@ -500,6 +500,25 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
}
lex :: (result : *Compile_Result) {
if result.had_error {
return;
}
for file : result.files {
lexer : Lexer;
init_lexer_from_string(*lexer, file.file.source);
token : *Token = scan_next_token(*lexer);
while token && token.kind != .TOKEN_EOF {
token = scan_next_token(*lexer);
}
// @Incomplete(nb): Temporary until we figure out a good way of passing this stuff around
copy_messages(lexer.result.messages, *result.messages);
}
}
lex :: (lexer : *Lexer, allocator : Allocator = context.allocator) -> Lexing_Result {
lexer.result.tokens.allocator = allocator;
token : *Token = scan_next_token(lexer);
@@ -700,3 +719,4 @@ print_from_source_location :: (source_location : Source_Range, allocator := cont
#import "Basic";
#import "File";

View File

@@ -14,7 +14,12 @@ Parse_State :: struct {
tokens : [..]Token;
current_token_index : int;
allocator : Allocator;
node_allocator : Allocator;
node_arena : Arena;
child_allocator : Allocator;
child_arena : Arena;
had_error : bool;
@@ -84,6 +89,8 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
rules[Token_Kind.TOKEN_RIGHTPAREN] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_LEFTBRACE] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_RIGHTBRACE] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_LEFTBRACKET] = .{null, array_access, .PREC_CALL};
rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE};
rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL};
rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL};
@@ -117,11 +124,12 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
return rules;
}
init_parse_state :: (parse_state : *Parse_State, tokens : [..]Token, path : string, allocator : Allocator) {
init_parse_state :: (parse_state : *Parse_State, tokens : [..]Token, path : string) {
parse_state.tokens = tokens;
parse_state.path = path;
parse_state.allocator = allocator;
parse_state.result.nodes.allocator = parse_state.allocator;
parse_state.node_allocator = make_arena(*parse_state.node_arena);
parse_state.child_allocator = make_arena(*parse_state.child_arena);
parse_state.result.nodes.allocator = parse_state.node_allocator;
array_reserve(*parse_state.result.nodes, 4096);
parse_state.current_token_index = 0;
}
@@ -272,6 +280,7 @@ make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
node : AST_Node;
node.kind = kind;
node.children.allocator = parse_state.child_allocator;
array_add(*parse_state.result.nodes, node);
return *parse_state.result.nodes[parse_state.result.nodes.count - 1];
@@ -429,6 +438,36 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
return binary_expression;
}
array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
identifier := parse_state.tokens[parse_state.current_token_index - 3];
left_bracket := parse_state.tokens[parse_state.current_token_index - 2];
array_access := make_node(parse_state, .Unary);
array_access.token = left_bracket;
array_index := expression(parse_state);
add_child(array_access, array_index);
add_child(left, array_access);
consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index.");
source_location : Source_Range;
source_location.begin = left.source_location.begin;
if check(parse_state, .TOKEN_ASSIGN) {
advance(parse_state);
node := make_node(parse_state, .Binary);
node.token = parse_state.previous;
add_child(node, left);
add_child(node, expression(parse_state));
return node;
}
source_location.end = parse_state.previous;
return left;
}
unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
op := parse_state.previous.*;
rule := get_rule(op.kind);
@@ -441,6 +480,10 @@ unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
case .TOKEN_MINUS; {
unary_expression.token = op;
}
case .TOKEN_LEFTBRACKET; {
unary_expression.token = op;
consume(parse_state, .TOKEN_RIGHTBRACKET, "Expect ']' after array access.");
}
}
return unary_expression;
@@ -588,6 +631,17 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
type_identifier := parse_state.current;
node.token = type_identifier;
advance(parse_state);
} else if check(parse_state, .TOKEN_LEFTBRACKET) {
advance(parse_state);
array_size_expression := expression(parse_state);
add_child(node, array_size_expression);
consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected closing ']' in array declaration.");
consume(parse_state, .TOKEN_DOT, "Expected '.' before array type.");
type_identifier := parse_state.current;
node.token = type_identifier;
advance(parse_state);
node.array_field = true;
} else {
missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");
return node;
@@ -704,6 +758,7 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
block :: (parse_state : *Parse_State) -> *AST_Node {
node : *AST_Node = make_node(parse_state, .Block);
array_reserve(*node.children, 1024);
source_location : Source_Range;
@@ -1004,6 +1059,33 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
return decl_node;
}
parse :: (result : *Compile_Result) {
for *file : result.files {
parse_state : Parse_State;
init_parse_state(*parse_state, file.tokens.tokens, file.file.path);
advance(*parse_state);
if !match(*parse_state, .TOKEN_EOF) {
parse_state.result.root = make_node(*parse_state, .Program);
array_reserve(*parse_state.result.root.children, 1024);
program := parse_state.result.root;
while !check(*parse_state, .TOKEN_EOF) {
decl := declaration(*parse_state);
if decl {
add_child(program, decl);
}
}
}
//@Incomplete(nb): will this straight copy just work?
// Might need to rething how we do this.
file.ast_root = parse_state.result.root;
file.ast_nodes = parse_state.result.nodes;
copy_messages(parse_state.result.messages, *result.messages);
}
}
parse :: (parse_state : *Parse_State) -> Parse_Result {
advance(parse_state);

View File

@@ -5,12 +5,10 @@
/////////////////////////////////////
//~ nbr: Error reporting TODOs
//
// [x] Improve error reporting on mismatched overloads when types don't match, but arity does
// [ ] Add and error for using keywords as names, or rename the dx11 keywords in the resulting hlsl shader.
// [x] Improve error reporting on mismatched overloads when types don't match, but arity does.
// [x] Improve error reporting for type mismatches in general. It seems like the expect node is not always correct.
#load "static_array.jai";
#import "Hash_Table";
VERTEX_MAIN_FUNCTION_PREFIX :: "vertex";
PIXEL_MAIN_FUNCTION_PREFIX :: "pixel";
PROPERTIES_PREFIX :: "properties";
@@ -71,10 +69,10 @@ Type_Variable :: struct {
struct_field_parent : *AST_Node;
typename : string;
is_array : bool;
MAX_TYPE_VARIABLE_CHILDREN :: 16;
children : [MAX_TYPE_VARIABLE_CHILDREN]Type_Variable_Handle;
child_count : int;
MAX_TYPE_VARIABLE_CHILDREN :: 32;
children : Static_Array(Type_Variable_Handle, MAX_TYPE_VARIABLE_CHILDREN);
//@Note(niels): For constant buffers
resource_index : u32;
@@ -128,6 +126,7 @@ Type_Constraint :: struct {
Scope_Stack :: struct {
allocator : Allocator;
arena : Arena;
stack : [..]Scope;
}
@@ -169,6 +168,8 @@ Scope_Handle :: #type, distinct u32;
Semantic_Check_Result :: struct {
messages : [..]Compiler_Message;
message_arena : Arena;
message_allocator : Allocator;
had_error : bool;
vertex_entry_point : *AST_Node;
@@ -178,6 +179,10 @@ Semantic_Check_Result :: struct {
scope_stack : Scope_Stack;
type_variables : [..]Type_Variable;
type_var_arena : Arena;
type_var_allocator : Allocator;
property_name : string;
}
Checker_State :: enum {
@@ -356,10 +361,9 @@ no_matching_overload_found :: (checker : *Semantic_Checker, call : *AST_Node, ov
func_var := h2tv(checker, func.type_variable);
if arg_list.children.count != func_var.child_count {
print_to_builder(*builder, "Not enough arguments: Wanted %, got %.\n\n", func_var.child_count, arg_list.children.count);
if arg_list.children.count != func_var.children.count {
print_to_builder(*builder, "Not enough arguments: Wanted %, got %.\n\n", func_var.children.count, arg_list.children.count);
}
}
}
@@ -538,7 +542,7 @@ type_mismatch :: (checker : *Semantic_Checker, usage_site : *AST_Node, expect_no
if got_var.builtin {
print_to_builder(*builder, "% :: (", got_var.name);
for i: 0..got_var.child_count - 1{
for i: 0..got_var.children.count - 1{
child_handle := got_var.children[i];
child := h2tv(checker, child_handle);
@@ -636,6 +640,8 @@ push_scope :: (checker : *Semantic_Checker, name := "", kind : Scope_Kind = .Glo
scope.builtin = true;
}
scope.children.allocator = checker.result.scope_stack.allocator;
if checker.current_scope {
scope := get_current_scope(checker);
array_add(*scope.children, xx count);
@@ -709,16 +715,16 @@ new_type_variable :: (checker : *Semantic_Checker) -> *Type_Variable, Type_Varia
}
add_child :: (variable : *Type_Variable, child : Type_Variable_Handle) {
assert(variable.child_count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
variable.children[variable.child_count] = child;
variable.child_count += 1;
assert(variable.children.count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
array_add(*variable.children, child);
// variable.children[variable.children.count] = child;
// variable.children.count += 1;
}
add_child :: (checker : *Semantic_Checker, handle : Type_Variable_Handle, child : Type_Variable_Handle) {
variable := h2tv(checker, handle);
assert(variable.child_count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
variable.children[variable.child_count] = child;
variable.child_count += 1;
assert(variable.children.count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
array_add(*variable.children, child);
}
init_semantic_checker :: (checker : *Semantic_Checker, root : *AST_Node, path : string) {
@@ -726,7 +732,10 @@ init_semantic_checker :: (checker : *Semantic_Checker, root : *AST_Node, path :
checker.path = path;
// @Incomplete(niels): Use other allocator and/or add static array with convenience functions
checker.result.type_var_allocator = make_arena(*checker.result.type_var_arena);
array_reserve(*checker.result.type_variables, 2048);
checker.result.scope_stack.allocator = make_arena(*checker.result.scope_stack.arena);
array_reserve(*checker.result.scope_stack.stack, 256);
global_scope, global_handle := push_scope(checker, kind = .Global);
@@ -930,6 +939,10 @@ declare_struct :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variab
declare_properties :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_Handle {
name := ifx node.name.count == 0 then "properties" else node.name;
if node.name.count > 0 {
checker.result.property_name = name;
}
type_var := declare_struct(checker, node, name);
var := h2tv(checker, type_var);
var.type = .Properties;
@@ -999,7 +1012,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
symbol.name = name_to_check;
symbol.source_node = node;
symbol.type_variable = 0;
array_reserve(*symbol.functions, 16);
array_reserve(*symbol.functions, 32);
array_add(*symbol.functions, function);
add_symbol_to_scope(checker, checker.current_scope, name_to_check, symbol);
@@ -1014,7 +1027,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
}
all_same : bool = true;
for i : 0..func_var.child_count - 1 {
for i : 0..func_var.children.count - 1 {
arg := func_var.children[i];
node_child := field_list.children[i];
@@ -1114,7 +1127,7 @@ create_function_constraint :: (checker : *Semantic_Checker, node : *AST_Node) {
constraint.kind = .Function_Decl;
constraint.function.symbol_variable = function.type_variable;
for i : 0..variable.child_count - 1 {
for i : 0..variable.children.count - 1 {
arg_var := variable.children[i];
if arg_var > 0 {
@@ -1249,6 +1262,16 @@ create_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable
typename : string;
variable.type = get_type_from_identifier(checker, checker.current_scope, node, *typename);
variable.is_array = node.array_field;
if variable.is_array {
size_node := node.children[0];
size_var := check_node(checker, size_node);
if h2tv(checker, size_var).type != .Int {
//@Incomplete(niels): Type mismatch here. With integral type required message.
}
}
if variable.kind == .Declaration && variable.type == .Sampler {
variable.resource_index = checker.current_sampler_index;
checker.current_sampler_index += 1;
@@ -1354,11 +1377,11 @@ create_call_constraint :: (checker : *Semantic_Checker, node : *AST_Node, type_v
function := h2tv(checker, func.type_variable);
if arg_count != function.child_count {
if arg_count != function.children.count {
continue;
}
if node.children.count == 0 && function.child_count == 0 {
if node.children.count == 0 && function.children.count == 0 {
overload_found = true;
break;
}
@@ -1368,7 +1391,7 @@ create_call_constraint :: (checker : *Semantic_Checker, node : *AST_Node, type_v
for arg : arg_vars {
function_param := function.children[it_index];
if !types_compatible(checker, arg.var, function_param) {
if !types_compatible(checker, arg.var, function_param, true) {
if all_args_match {
arg_node = arg.node;
}
@@ -1431,6 +1454,7 @@ check_node :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_H
variable, handle := new_type_variable(checker);
lhs_type := h2tv(checker, lhs_var);
variable.type = lhs_type.type;
variable.typename = lhs_type.typename;
variable.scope = lhs_type.scope;
variable.source_node = node;
node.type_variable = handle;
@@ -1445,7 +1469,9 @@ check_node :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_H
create_equivalence_constraint(checker, rhs_var, lhs_var, node);
proper_variable, rhs_handle := new_type_variable(checker);
proper_variable.type = h2tv(checker, lhs_var).type;
lhs_type_var := h2tv(checker, lhs_var);
proper_variable.type = lhs_type_var.type;
proper_variable.typename = lhs_type_var.typename;
proper_variable.source_node = h2tv(checker, lhs_var).source_node;
proper_variable.struct_field_parent = h2tv(checker, lhs_var).struct_field_parent;
@@ -1545,7 +1571,7 @@ Unification_Result :: enum {
Unification_Failure;
}
types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rhs : Type_Variable_Handle) -> bool {
types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rhs : Type_Variable_Handle, param_matching : bool = false) -> bool {
lhs_var := h2tv(checker, lhs);
rhs_var := h2tv(checker, rhs);
@@ -1554,6 +1580,17 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
case .Half; #through;
case .Float; #through;
case .Double; {
if !param_matching {
if rhs_var.type == .Struct {
if rhs_var.typename == {
case "float2"; #through;
case "float3"; #through;
case "float4"; {
return true;
}
}
}
}
return rhs_var.type == .Int || rhs_var.type == .Half ||
rhs_var.type == .Float || rhs_var.type == .Double;
}
@@ -1569,6 +1606,16 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
lhs_node := lhs_var.source_node;
rhs_node := rhs_var.source_node;
if rhs_var.type != .Struct {
if lhs_var.typename == {
case "float2"; #through;
case "float3"; #through;
case "float4"; {
return rhs_var.type == .Int || rhs_var.type == .Half || rhs_var.type == .Double || rhs_var.type == .Float;
}
}
}
lhs_struct := find_symbol(checker, lhs_var.typename, xx 1);
rhs_struct := find_symbol(checker, rhs_var.typename, xx 1);
@@ -1583,14 +1630,16 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
lhs_struct_var := h2tv(checker, lhs_struct.type_variable);
rhs_struct_var := h2tv(checker, rhs_struct.type_variable);
if lhs_struct_var.child_count != rhs_struct_var.child_count {
if lhs_struct_var.children.count != rhs_struct_var.children.count {
return false;
}
for i : 0..lhs_struct_var.child_count - 1 {
for i : 0..lhs_struct_var.children.count - 1 {
lhs_child := lhs_struct_var.children[i];
rhs_child := rhs_struct_var.children[i];
if !types_compatible(checker, lhs_child, rhs_child) return false;
if !types_compatible(checker, lhs_child, rhs_child) {
return false;
}
}
return true;
@@ -1676,8 +1725,35 @@ union_find :: (checker : *Semantic_Checker) -> bool {
return true;
}
// HLSL_BUILTIN :: #run -> string {
// T := #load "hlsl_builtin.jai";
// return "";
// };
add_hlsl_builtins :: (checker : *Semantic_Checker) {
HLSL_BUILTIN := read_entire_file("hlsl_builtin.shd");
source_location := #location().fully_pathed_filename;
path_array := split(source_location, "/");
sb : String_Builder;
for i : 0..path_array.count - 2 {
print_to_builder(*sb, path_array[i]);
append(*sb, "/");
}
append(*sb, "hlsl_builtin.shd");
path := builder_to_string(*sb);
HLSL_BUILTIN, ok := read_entire_file(path);
if !ok {
messages : [..]Compiler_Message;
internal_error_message(*messages, "Error loading builtin functions.", checker.path);
print("%\n", report_messages(messages));
assert(false);
return;
}
checker.state = .Adding_Builtins;
@@ -1696,7 +1772,7 @@ add_hlsl_builtins :: (checker : *Semantic_Checker) {
}
parse_state : Parse_State;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -1760,6 +1836,7 @@ check :: (checker : *Semantic_Checker) -> Semantic_Check_Result {
// ===========================================================
// Pretty printing
#scope_file
type_to_string :: (type_variable : Type_Variable) -> string {
if type_variable.type == {
@@ -1776,14 +1853,18 @@ type_to_string :: (type_variable : Type_Variable) -> string {
return Typenames[type_variable.type];
}
case .Function; #through;
case .Struct;
case .Struct; {
return type_variable.typename;
}
case .Array;
return "array";
}
return "";
}
#scope_export
print_key :: (checker : *Semantic_Checker, builder : *String_Builder, name : string) {
scope := get_current_scope(checker);
target_length := scope.longest_key_length + 1;
@@ -1835,14 +1916,14 @@ pretty_print_struct :: (checker : *Semantic_Checker, builder : *String_Builder,
print_key(checker, builder, name);
append(builder, "{");
for 0..struct_type.child_count - 1 {
for 0..struct_type.children.count - 1 {
child_handle := struct_type.children[it];
child := h2tv(checker, child_handle);
print_to_builder(builder, child.name);
append(builder, " : ");
print_to_builder(builder, type_to_string(child));
if it < struct_type.child_count - 1 {
if it < struct_type.children.count - 1 {
append(builder, ", ");
}
}
@@ -2106,14 +2187,14 @@ pretty_print_type_variable :: (checker : *Semantic_Checker, type_variable : *Typ
if type_variable.kind == .Declaration {
append(builder, "{");
for 0..type_variable.child_count - 1 {
for 0..type_variable.children.count - 1 {
child_handle := type_variable.children[it];
child := h2tv(checker, child_handle);
print_to_builder(builder, child.name);
append(builder, " : ");
print_to_builder(builder, type_to_string(child));
if it < type_variable.child_count - 1 {
if it < type_variable.children.count - 1 {
append(builder, ", ");
}
}
@@ -2153,3 +2234,9 @@ pretty_print_type_constraints :: (checker : *Semantic_Checker, allocator : Alloc
return builder_to_string(*builder,, allocator);
}
#scope_module
#import "ncore";
#import "Hash_Table";
#import "String";

View File

@@ -216,7 +216,7 @@ run_parser_test :: (lexer : *Lexer, output_type : Output_Type = 0) -> Result, *A
result_data : Result;
result_data.path = lexer.path;
result_data.stage = .Parser;
init_parse_state(*parse_state, lexer.result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lexer.result.tokens, lexer.path);
result := parse(*parse_state);
result_node : *AST_Node;
@@ -272,7 +272,7 @@ run_semantic_analysis_test :: (file_path : string, output_type : Output_Type = 0
parse_state : Parse_State;
result_data.stage = .Parser;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -310,8 +310,8 @@ run_semantic_analysis_test :: (file_path : string, root : *AST_Node, output_type
result_text = pretty_print_symbol_table(*checker, temp);
constraints := pretty_print_type_constraints(*checker, temp);
type_vars := pretty_print_type_variables(*checker, temp);
print("Constraints\n%\n", constraints);
print("Solution\n%\n", type_vars);
// print("Constraints\n%\n", constraints);
// print("Solution\n%\n", type_vars);
}
if output_type & .StdOut {
@@ -398,7 +398,7 @@ run_codegen_test :: (path : string, output_type : Output_Type = 0) -> Result, Co
parse_state : Parse_State;
result_data.stage = .Parser;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -477,7 +477,23 @@ run_test :: (file_path : string, stage_flags : Stage_Flags, results : *[..]Resul
}
run_test :: (test_case : Test_Case, results : *[..]Result, output_type : Output_Type = 0) {
print("%Running test: %\n", cyan(), test_case.path);
print("%Running test: %......", cyan(), test_case.path);
// path 30
// len 35
// == 5
// path 20
// len = 35
// == 15
len := 50;
rest := len - test_case.path.count;
for i: 0..rest {
print(" ");
}
run_test(test_case.path, test_case.stage_flags, results, output_type);
}
@@ -525,7 +541,7 @@ run_test_suite :: (using suite : *Test_Suite, output_type : Output_Type = 0) {
}
}
print("\n");
// print("\n");
}
print("\n");
@@ -621,25 +637,25 @@ evaluate_result :: (result : Result) {
if #complete result.type == {
case .File_Read_Failed; {
print(" %", red());
print("% failed with File_Read_Failed\n", result.path);
print("failed with File_Read_Failed\n");
}
case .Golden_File_Not_Found; {
print(" %", red());
print("% failed with Golden File Not Found for stage %\n", result.path, stage);
print("failed with Golden File Not Found for stage %\n", stage);
}
case .StdOut; {
}
case .Golden_Output; {
print(" %", yellow());
print("% output new golden file at %\n", result.path, result.golden_path);
print("output new golden file at %\n", result.golden_path);
}
case .Passed; {
print(" %", green());
print("% passed %\n", result.path, stage);
print("passed %\n", stage);
}
case .Failed; {
print(" %", red());
print("% failed %\n", result.path, stage);
print("failed %\n", stage);
}
}

View File

@@ -1,3 +1,3 @@
@echo off
jai Test.jai
jai first.jai -natvis

first.jai Normal file (42 lines added)

@@ -0,0 +1,42 @@
#import "Basic";
#import "File";
#import "Compiler";
build :: () {
w := compiler_create_workspace("Shader Compiler Test Build");
if !w {
print("Workspace creation failed.\n");
return;
}
EXECUTABLE_NAME :: "test";
MAIN_FILE :: "Test.jai";
options := get_build_options(w);
options.write_added_strings = true;
new_path: [..] string;
array_add(*new_path, ..options.import_path);
array_add(*new_path, "modules");
// array_add(*new_path, "modules/shader_parsing");
options.import_path = new_path;
options.output_executable_name = EXECUTABLE_NAME;
wd := get_working_directory();
set_build_options(options, w);
compiler_begin_intercept(w);
add_build_file(MAIN_FILE, w);
compiler_end_intercept(w);
print("\nDone!\n\n");
set_build_options_dc(.{do_output=false});
}
#run build();
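This metaprogram is what the updated build.bat now invokes (`jai first.jai -natvis`); it appends "modules" to the compiler's import path, presumably so that the `#import "ncore";` statements added elsewhere in this change resolve against the new modules/ncore submodule.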

View File

@@ -1,6 +1,5 @@
HLSL_BUILTIN : string;
#run {
HLSL_BUILTIN = read_entire_file("hlsl_builtin.shd");
}
HLSL_BUILTIN :: #run -> string {
// return read_entire_file("./hlsl_builtin.shd");
return "";
};

View File

@@ -259,4 +259,4 @@ int4x4 :: struct {
#foreign atan2 :: (float4, float4) -> float4;
#foreign atan2 :: (float4x4, float4x4) -> float4x4;
#foreign sample :: (Texture2D, float2, Sampler) -> float4;
#foreign sample :: (Texture2D, Sampler, float2) -> float4;
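The reordered builtin puts the sampler before the UV coordinates so the argument list matches HLSL's Texture2D.Sample(SamplerState, float2) method, which is what emit_call above now emits: a shader-level call such as sample(tex, samp, uv) would lower to tex.Sample(samp, uv). The names tex, samp, and uv are illustrative; the HLSL method name and parameter order are standard, while the shader-side spelling is an assumption based on the builtin declaration shown here.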

View File

@@ -56,6 +56,7 @@ Hint_Kind :: enum {
None;
Position;
UV;
Target;
Custom;
@@ -131,6 +132,68 @@ Shader_Variant_Collection :: struct {
variants : [..]Shader_Variant;
}
Input_File :: struct {
source : string;
path : string;
}
Token_Stream :: struct {
tokens : [..]Token;
}
Compiled_File :: struct {
file : Input_File;
tokens : Token_Stream;
ast_root : *AST_Node;
ast_nodes : [..]AST_Node;
}
Compile_Result :: struct {
files : [..]Compiled_File;
had_error : bool;
messages : [..]Compiler_Message;
allocator : Allocator;
arena : Arena;
}
//@Incomplete(niels): need to consider allocation
add_file :: (result : *Compile_Result, path : string) {
file_string, ok := read_entire_file(path);
if !ok {
// record_error(.File_Load_Failed, "Unable to load file: %", path);
return;
}
input_file : Input_File;
input_file.source = file_string;
input_file.path = path;
compiled_file : Compiled_File;
compiled_file.file = input_file;
array_add(*result.files, compiled_file);
}
// @Incomplete(nb): Will we ever even use this?
from_file :: (path : string) -> Compile_Result {
arr : [1]string;
arr[0] = path;
return from_files(arr);
}
from_files :: (paths : []string) -> Compile_Result {
result : Compile_Result;
for path : paths {
add_file(*result, path);
}
return result;
}
Compilation_Result :: struct {
messages : [..]Compiler_Message;
@@ -257,7 +320,7 @@ type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variabl
type_var := h2tv(checker, find_result.type_variable);
for i : 0..type_var.child_count - 1 {
for i : 0..type_var.children.count - 1 {
child := type_var.children[i];
child_field := type_variable_to_field(checker, h2tv(checker, child));
array_add(*type.children, child_field);
@@ -273,6 +336,8 @@ type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variabl
if hint.ident_value == "position" {
// @Incomplete(nb): Should be a lookup table somewhere
field_hint.kind = .Position;
} else if hint.ident_value == "uv" {
field_hint.kind = .UV;
} else if starts_with(hint.ident_value, "target") {
// @Incomplete(nb): Should be a lookup table somewhere
index_str : string;
@@ -295,6 +360,21 @@ type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variabl
return field;
}
compile_file :: (compiler : *Shader_Compiler, paths : []string) -> Compile_Result {
result : Compile_Result;
for path : paths {
add_file(*result, path);
}
lex(*result);
// parse(*result);
// check(*result);
// codegen(*result);
return result;
}
compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Result {
result : Compilation_Result;
@@ -315,7 +395,7 @@ compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Resu
}
parse_state : Parse_State;
init_parse_state(*parse_state, lex_result.tokens, lexer.path, context.allocator);
init_parse_state(*parse_state, lex_result.tokens, lexer.path);
parse_result := parse(*parse_state);
if parse_result.had_error {
@@ -376,7 +456,7 @@ compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Resu
cb := array_add(*result.collection.cbuffers);
for i : 0..variable.child_count - 1 {
for i : 0..variable.children.count - 1 {
child := variable.children[i];
field : Property_Field;
field.base_field = type_variable_to_field(*checker, h2tv(*checker, child));;
@@ -386,11 +466,11 @@ compile_file :: (compiler : *Shader_Compiler, path : string) -> Compilation_Resu
cb.buffer_index = variable.resource_index;
}
find_result := find_symbol(*check_result.scope_stack, "properties", xx 1);
find_result := find_symbol(*check_result.scope_stack, check_result.property_name, xx 1);
if find_result {
property_variable := h2tv(check_result.type_variables, find_result.type_variable);
for i : 0..property_variable.child_count - 1 {
for i : 0..property_variable.children.count - 1 {
child := property_variable.children[i];
field := type_variable_to_field(*checker, h2tv(*checker, child));
prop_field : Property_Field;

modules/ncore Submodule (1 line changed)

Submodule modules/ncore added at 9db7ff0940

View File

@@ -1,43 +0,0 @@
Static_Array :: struct (T : Type, N : int) {
array : [N] T;
count : int;
}
operator *[] :: (sa : *Static_Array, index : int) -> *sa.T {
assert(index < sa.count);
return *sa.array[index];
}
array_add :: (sa : *Static_Array, item : sa.T) {
assert(sa.count + 1 < sa.N);
sa.array[sa.count] = item;
sa.count += 1;
}
array_add :: (sa : *Static_Array) -> *sa.T {
assert(sa.count + 1 < sa.N);
ptr := *sa.array[sa.count];
sa.count += 1;
return ptr;
}
pop :: (sa : *Static_Array) -> sa.T {
assert(sa.count > 0);
elem := sa.array[sa.count - 1];
sa.count -= 1;
return elem;
}
clear :: (sa : *Static_Array) {
sa.count = 0;
}
to_array :: (sa : *Static_Array) -> []sa.T {
array : []sa.T;
array.count = sa.count;
array.data = sa.array.data;
return array;
}
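The local static_array.jai is deleted here, but Static_Array itself is still used (e.g. for Type_Variable.children in the semantic checker above); per the commit messages in this range ("Add nbrutil to modules. Move static array to nbrutil", later renamed to ncore), the implementation presumably now lives in the ncore submodule pulled in via `#import "ncore";`.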

test/arrays.shd Normal file (5 lines added)

@@ -0,0 +1,5 @@
vertex main :: () -> float4 @position {
arr : [16].float4;
arr[0] = float4(1,1,1);
return arr[0];
}

View File

@@ -1,5 +1,5 @@
void vs_main()
{
float x = 2.0f + 5.0f;
float x = (2.0f + 5.0f);
}

View File

@@ -1,8 +1,9 @@
cbuffer __PROPERTIES : register(b0)
{
float4 color;
float4 __PROPERTIES__color;
}
float3 vs_main(float3 pos : POSITION) : SV_POSITION
{
return pos;
@@ -10,6 +11,6 @@ float3 vs_main(float3 pos : POSITION) : SV_POSITION
float4 ps_main() : SV_TARGET
{
return color;
return __PROPERTIES__color;
}

View File

@@ -2,6 +2,6 @@ void vs_main()
{
float x = 5.0f;
float y = 3000.0f;
float z = y * y + x;
float z = ((y * y) + x);
}

View File

@@ -1,4 +1,2 @@
struct Foo;
struct Foo {}
struct Foo {};

View File

@@ -1,15 +1,16 @@
cbuffer __PROPERTIES : register(b0)
{
float4 color;
float4 __PROPERTIES__color;
}
float3 vs_main(float3 pos : POSITION, float2 uv) : SV_POSITION
float3 vs_main(float3 pos : POSITION, float2 uv : TEXCOORD0) : SV_POSITION
{
return pos;
}
float4 ps_main() : SV_TARGET
{
return color;
return __PROPERTIES__color;
}

View File

@@ -8,7 +8,7 @@ int foo()
float bar()
{
return 1235.0f * 500;
return (1235.0f * 500);
}
void vs_main()

View File

@@ -1,15 +1,13 @@
struct Foo;
float foo(Foo f);
struct Foo
{
float some_data;
}
};
float foo(Foo f)
{
return f.some_data * 2.0f;
return (f.some_data * 2.0f);
}
void vs_main()

View File

@@ -1,9 +1,7 @@
struct Data;
struct Data
{
float4 color;
}
};
void vs_main()
{

View File

@@ -1,15 +1,12 @@
struct Foo;
struct Bar;
struct Foo
{
float4 color;
}
};
struct Bar
{
Foo t;
}
};
void vs_main()
{

test/double_access.shd Normal file (7 lines added)

@@ -0,0 +1,7 @@
p :: properties {
v : float2;
}
vertex main ::() {
x : float = p.v.x / p.v.y;
}

test/large_block.shd Normal file (42 lines added)

@@ -0,0 +1,42 @@
p :: properties {
color : float4;
rect_position : float2;
rect_scale : float2;
resolution : float2;
texture : Texture2D;
sampler : Sampler;
}
PS_Input :: struct {
uv : float2 @uv;
pos : float4 @pos;
}
vertex main :: (pos : float4 @position) -> PS_Input {
res : float2 = p.resolution;
scale : float2 = p.rect_scale;
rect_pos : float2 = p.rect_position;;
center : float2 = rect_pos;
half_size : float2 = float2(scale.x / 2, scale.y / 2);
dst_pos : float4 = float4(pos.x * half_size.x + center.x, pos.y * half_size.y + center.y, 0.0, 1.0);
result : PS_Input;
src_p0 : float2 = float2(0.0, 1.0);
src_p1 : float2 = float2(1.0, 0.0);
src_half_size : float2 = (src_p1 - src_p0) / 2;
src_center : float2 = (src_p1 + src_p0) / 2;
src_pos : float2 = float2(pos.x, pos.y) * src_half_size + src_center;
result.uv = float2(1, 1);
result.pos = float4(2.0 * dst_pos.x / res.x - 1, 2.0 * dst_pos.y / res.y - 1, 0.0, 1.0);
return result;
}
pixel main :: (input : PS_Input) -> float4 @target0 {
color : float4 = p.color;
return color;
}

test/lex/arrays.golden Normal file (40 lines added)

@@ -0,0 +1,40 @@
{kind = TOKEN_VERTEX; ; index = 0 ; length = 6 line = 1 ; column = 0 ; value ='vertex'; }
{kind = TOKEN_IDENTIFIER; ; index = 7 ; length = 4 line = 1 ; column = 7 ; value ='main'; }
{kind = TOKEN_DOUBLECOLON; ; index = 12 ; length = 2 line = 1 ; column = 12 ; value ='::'; }
{kind = TOKEN_LEFTPAREN; ; index = 15 ; length = 1 line = 1 ; column = 15 ; value ='('; }
{kind = TOKEN_RIGHTPAREN; ; index = 16 ; length = 1 line = 1 ; column = 16 ; value =')'; }
{kind = TOKEN_ARROW; ; index = 18 ; length = 2 line = 1 ; column = 18 ; value ='->'; }
{kind = TOKEN_IDENTIFIER; ; index = 21 ; length = 6 line = 1 ; column = 21 ; value ='float4'; }
{kind = TOKEN_AT; ; index = 28 ; length = 1 line = 1 ; column = 28 ; value ='@'; }
{kind = TOKEN_IDENTIFIER; ; index = 29 ; length = 8 line = 1 ; column = 29 ; value ='position'; }
{kind = TOKEN_LEFTBRACE; ; index = 38 ; length = 1 line = 1 ; column = 38 ; value ='{'; }
{kind = TOKEN_IDENTIFIER; ; index = 42 ; length = 3 line = 2 ; column = 0 ; value ='arr'; }
{kind = TOKEN_COLON; ; index = 46 ; length = 1 line = 2 ; column = 4 ; value =':'; }
{kind = TOKEN_LEFTBRACKET; ; index = 48 ; length = 1 line = 2 ; column = 6 ; value ='['; }
{kind = TOKEN_INTLITERAL; ; index = 49 ; length = 2 line = 2 ; column = 7 ; value ='16'; }
{kind = TOKEN_RIGHTBRACKET; ; index = 51 ; length = 1 line = 2 ; column = 9 ; value =']'; }
{kind = TOKEN_DOT; ; index = 52 ; length = 1 line = 2 ; column = 10 ; value ='.'; }
{kind = TOKEN_IDENTIFIER; ; index = 53 ; length = 6 line = 2 ; column = 11 ; value ='float4'; }
{kind = TOKEN_SEMICOLON; ; index = 59 ; length = 1 line = 2 ; column = 17 ; value =';'; }
{kind = TOKEN_IDENTIFIER; ; index = 63 ; length = 3 line = 3 ; column = 0 ; value ='arr'; }
{kind = TOKEN_LEFTBRACKET; ; index = 66 ; length = 1 line = 3 ; column = 3 ; value ='['; }
{kind = TOKEN_INTLITERAL; ; index = 67 ; length = 1 line = 3 ; column = 4 ; value ='0'; }
{kind = TOKEN_RIGHTBRACKET; ; index = 68 ; length = 1 line = 3 ; column = 5 ; value =']'; }
{kind = TOKEN_ASSIGN; ; index = 70 ; length = 1 line = 3 ; column = 7 ; value ='='; }
{kind = TOKEN_IDENTIFIER; ; index = 72 ; length = 6 line = 3 ; column = 9 ; value ='float4'; }
{kind = TOKEN_LEFTPAREN; ; index = 78 ; length = 1 line = 3 ; column = 15 ; value ='('; }
{kind = TOKEN_INTLITERAL; ; index = 79 ; length = 1 line = 3 ; column = 16 ; value ='1'; }
{kind = TOKEN_COMMA; ; index = 80 ; length = 1 line = 3 ; column = 17 ; value =','; }
{kind = TOKEN_INTLITERAL; ; index = 81 ; length = 1 line = 3 ; column = 18 ; value ='1'; }
{kind = TOKEN_COMMA; ; index = 82 ; length = 1 line = 3 ; column = 19 ; value =','; }
{kind = TOKEN_INTLITERAL; ; index = 83 ; length = 1 line = 3 ; column = 20 ; value ='1'; }
{kind = TOKEN_RIGHTPAREN; ; index = 84 ; length = 1 line = 3 ; column = 21 ; value =')'; }
{kind = TOKEN_SEMICOLON; ; index = 85 ; length = 1 line = 3 ; column = 22 ; value =';'; }
{kind = TOKEN_RETURN; ; index = 89 ; length = 6 line = 4 ; column = 0 ; value ='return'; }
{kind = TOKEN_IDENTIFIER; ; index = 96 ; length = 3 line = 4 ; column = 7 ; value ='arr'; }
{kind = TOKEN_LEFTBRACKET; ; index = 99 ; length = 1 line = 4 ; column = 10 ; value ='['; }
{kind = TOKEN_INTLITERAL; ; index = 100 ; length = 1 line = 4 ; column = 11 ; value ='0'; }
{kind = TOKEN_RIGHTBRACKET; ; index = 101 ; length = 1 line = 4 ; column = 12 ; value =']'; }
{kind = TOKEN_SEMICOLON; ; index = 102 ; length = 1 line = 4 ; column = 13 ; value =';'; }
{kind = TOKEN_RIGHTBRACE; ; index = 105 ; length = 1 line = 5 ; column = 0 ; value ='}'; }
{kind = TOKEN_EOF; ; index = 108 ; length = 0 line = 6 ; column = 0 ; value =''; }

test/parse/arrays.golden Normal file (6 lines added)

@@ -0,0 +1,6 @@
(program
(fun vertex vs_main -> float4 (@position)
[]
(:= arr [16].float4)
(= arr[0] (float4 1 1 1))
(return arr[0])))

View File

@@ -6,8 +6,6 @@ test/empty_struct.shd semant
test/empty_vertex_main.shd semant
test/empty_vertex_main_with_position_parameter.shd semant
test/field_assignment.shd semant
test/field_without_type_specifier.shd semant
test/float_suffix.shd semant
test/function_call.shd semant
test/function_call_out_of_order_declaration.shd semant
test/function_call_return.shd semant

View File

@@ -0,0 +1,14 @@
vertex main :: (pos : float4 @position) -> float4 @position {
src_p0 : float2 = float2(0.0, 1.0);
src_p1 : float2 = float2(1.0, 0.0);
src_half_size : float2 = (src_p1 - src_p0) / 2;
src_center : float2 = (src_p1 + src_p0) / 2;
src_pos : float2 = float2(pos.x, pos.y) * src_half_size + src_center;
return float4(1, 1, 1, 1);
}
pixel main :: () -> float4 @target0 {
return float4(1, 1, 1, 1);
}