Compare commits: ae54f1374e...b2ee560145 (5 commits)

| SHA1 | Author | Date |
|---|---|---|
| b2ee560145 | | |
| b475357cf9 | | |
| 76994b2567 | | |
| 0471dbe7d7 | | |
| c5758bd023 | | |
.gitmodules — new file (vendored), +3

@@ -0,0 +1,3 @@
+[submodule "modules/nbrutil"]
+    path = modules/ncore
+    url = git@git.nbross.com:nielsbross/nbrutil.git
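
Note: this commit range vendors a git submodule (see modules/ncore below), so a checkout of these commits needs the submodule populated before the build can find it. A minimal sketch using stock git commands, where <repo-url> is a placeholder for this repository's clone URL (not the submodule URL above):

    # fresh clone with submodules in one step
    git clone --recurse-submodules <repo-url>

    # or, in an existing checkout
    git submodule update --init modules/ncore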
AST.jai — 20 changes

@@ -54,6 +54,8 @@ AST_Node :: struct {
 
     token : Token;
 
+    array_field : bool;
+
     source_location : Source_Range;
 
     type_variable : Type_Variable_Handle;
@@ -105,7 +107,15 @@ pretty_print_field :: (node : *AST_Node, indentation : int, builder : *String_Bu
     print_to_builder(builder, tprint("(:= %", node.name));
 
     if node.kind != .Unnamed_Field && node.token.ident_value.count > 0 {
-        print_to_builder(builder, tprint(" %", node.token.ident_value));
+        if node.array_field {
+            append(builder, " [");
+            pretty_print_node(node.children[0], 0, builder);
+            append(builder, "].");
+            print_to_builder(builder, "%", node.token.ident_value);
+        } else {
+            print_to_builder(builder, " %", node.token.ident_value);
+        }
+
     }
 
     for hint : node.hint_tokens {
@@ -114,7 +124,7 @@ pretty_print_field :: (node : *AST_Node, indentation : int, builder : *String_Bu
         }
     }
 
-    if node.children.count > 0 {
+    if !node.array_field && node.children.count > 0 {
         append(builder, " ");
         pretty_print_children(node, indentation, builder);
     }
@@ -271,10 +281,16 @@ pretty_print_node :: (node : *AST_Node, indentation : int, builder : *String_Bui
 pretty_print_variable :: (node : *AST_Node, indentation : int, builder : *String_Builder) {
     indent(builder, indentation);
     print_to_builder(builder, "%", node.name);
+
     for child : node.children {
+        print("%\n", child.kind);
         if child.kind == .Variable {
             append(builder, ".");
             pretty_print_variable(child, indentation, builder);
+        } else if child.kind == .Unary {
+            append(builder, "[");
+            pretty_print_node(child.children[0], 0, builder);
+            append(builder, "]");
         }
     }
 }

(file name not shown in this view)

@@ -477,3 +477,6 @@ codegen :: (ast_root : *AST_Node, checker_result : Semantic_Check_Result, output
     init_codegen_state(*codegen_state, ast_root, checker_result, output_language);
     return codegen(*codegen_state);
 }
+
+#scope_module
+#import "ncore";

(file name not shown in this view)

@@ -700,3 +700,4 @@ print_from_source_location :: (source_location : Source_Range, allocator := cont
 
 
 #import "Basic";
+#import "File";
Parsing.jai — 47 changes

@@ -84,6 +84,8 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
     rules[Token_Kind.TOKEN_RIGHTPAREN] = .{null, null, .PREC_NONE};
     rules[Token_Kind.TOKEN_LEFTBRACE] = .{null, null, .PREC_NONE};
     rules[Token_Kind.TOKEN_RIGHTBRACE] = .{null, null, .PREC_NONE};
+    rules[Token_Kind.TOKEN_LEFTBRACKET] = .{null, array_access, .PREC_CALL};
+    rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE};
     rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE};
     rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL};
     rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL};
@@ -429,6 +431,36 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
     return binary_expression;
 }
 
+array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
+    identifier := parse_state.tokens[parse_state.current_token_index - 3];
+    left_bracket := parse_state.tokens[parse_state.current_token_index - 2];
+
+    array_access := make_node(parse_state, .Unary);
+    array_access.token = left_bracket;
+    array_index := expression(parse_state);
+    add_child(array_access, array_index);
+
+    add_child(left, array_access);
+
+    consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index.");
+
+    source_location : Source_Range;
+    source_location.begin = left.source_location.begin;
+
+    if check(parse_state, .TOKEN_ASSIGN) {
+        advance(parse_state);
+
+        node := make_node(parse_state, .Binary);
+        node.token = parse_state.previous;
+        add_child(node, left);
+        add_child(node, expression(parse_state));
+        return node;
+    }
+
+    source_location.end = parse_state.previous;
+    return left;
+}
+
 unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
     op := parse_state.previous.*;
     rule := get_rule(op.kind);
@@ -441,6 +473,10 @@ unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
         case .TOKEN_MINUS; {
             unary_expression.token = op;
         }
+        case .TOKEN_LEFTBRACKET; {
+            unary_expression.token = op;
+            consume(parse_state, .TOKEN_RIGHTBRACKET, "Expect ']' after array access.");
+        }
     }
 
     return unary_expression;
@@ -588,6 +624,17 @@ field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
         type_identifier := parse_state.current;
         node.token = type_identifier;
         advance(parse_state);
+    } else if check(parse_state, .TOKEN_LEFTBRACKET) {
+        advance(parse_state);
+        array_size_expression := expression(parse_state);
+        add_child(node, array_size_expression);
+        consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected closing ']' in array declaration.");
+        consume(parse_state, .TOKEN_DOT, "Expected '.' before array type.");
+
+        type_identifier := parse_state.current;
+        node.token = type_identifier;
+        advance(parse_state);
+        node.array_field = true;
     } else {
         missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");
         return node;

(file name not shown in this view)

@@ -8,9 +8,6 @@
 // [x] Improve error reporting on mismatched overloads when types don't match, but arity does
 // [x] Improve error reporting for type mismatches in general. It seems like the expect node is not always correct.
 
-#load "static_array.jai";
-#import "Hash_Table";
-
 VERTEX_MAIN_FUNCTION_PREFIX :: "vertex";
 PIXEL_MAIN_FUNCTION_PREFIX :: "pixel";
 PROPERTIES_PREFIX :: "properties";
@@ -71,6 +68,7 @@ Type_Variable :: struct {
     struct_field_parent : *AST_Node;
 
     typename : string;
+    is_array : bool;
 
     MAX_TYPE_VARIABLE_CHILDREN :: 16;
     children : [MAX_TYPE_VARIABLE_CHILDREN]Type_Variable_Handle;
@@ -1255,6 +1253,17 @@ create_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable
     typename : string;
     variable.type = get_type_from_identifier(checker, checker.current_scope, node, *typename);
 
+    variable.is_array = node.array_field;
+
+    if variable.is_array {
+        size_node := node.children[0];
+        size_var := check_node(checker, size_node);
+        if h2tv(checker, size_var).type != .Int {
+            //@Incomplete(niels): Type mismatch here. With integral type required message.
+            print("Shiet\n");
+        }
+    }
+
     if variable.kind == .Declaration && variable.type == .Sampler {
         variable.resource_index = checker.current_sampler_index;
         checker.current_sampler_index += 1;
@@ -2186,3 +2195,9 @@ pretty_print_type_constraints :: (checker : *Semantic_Checker, allocator : Alloc
 
     return builder_to_string(*builder,, allocator);
 }
+
+#scope_module
+
+#import "ncore";
+#import "Hash_Table";
+#import "String";
first.jai — new file, +42

@@ -0,0 +1,42 @@
+#import "Basic";
+#import "File";
+#import "Compiler";
+
+build :: () {
+    w := compiler_create_workspace("Shader Compiler Test Build");
+    if !w {
+        print("Workspace creation failed.\n");
+        return;
+    }
+
+    EXECUTABLE_NAME :: "test";
+    MAIN_FILE :: "Test.jai";
+
+    options := get_build_options(w);
+
+    options.write_added_strings = true;
+
+    new_path: [..] string;
+    array_add(*new_path, ..options.import_path);
+    array_add(*new_path, "modules");
+    // array_add(*new_path, "modules/shader_parsing");
+    options.import_path = new_path;
+    options.output_executable_name = EXECUTABLE_NAME;
+
+    wd := get_working_directory();
+
+
+    set_build_options(options, w);
+
+    compiler_begin_intercept(w);
+
+    add_build_file(MAIN_FILE, w);
+
+    compiler_end_intercept(w);
+
+    print("\nDone!\n\n");
+
+    set_build_options_dc(.{do_output=false});
+}
+
+#run build();
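
Note: first.jai is a standard Jai build metaprogram: #run build() creates a workspace, appends "modules" to the import path (which makes the ncore submodule importable), builds Test.jai into an executable named test, and set_build_options_dc(.{do_output=false}) suppresses output for the metaprogram file itself. Assuming the stock Jai compiler driver, it would typically be run from the repository root as:

    jai first.jai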
modules/ncore — new submodule, +1

Submodule modules/ncore added at d82796090a

(deleted file — name not shown in this view; contents are the Static_Array helpers previously pulled in via #load "static_array.jai")

@@ -1,43 +0,0 @@
-Static_Array :: struct (T : Type, N : int) {
-    array : [N] T;
-
-    count : int;
-}
-
-operator *[] :: (sa : *Static_Array, index : int) -> *sa.T {
-    assert(index < sa.count);
-    return *sa.array[index];
-}
-
-array_add :: (sa : *Static_Array, item : sa.T) {
-    assert(sa.count + 1 < sa.N);
-
-    sa.array[sa.count] = item;
-    sa.count += 1;
-}
-
-array_add :: (sa : *Static_Array) -> *sa.T {
-    assert(sa.count + 1 < sa.N);
-
-    ptr := *sa.array[sa.count];
-    sa.count += 1;
-    return ptr;
-}
-
-pop :: (sa : *Static_Array) -> sa.T {
-    assert(sa.count > 0);
-    elem := sa.array[sa.count - 1];
-    sa.count -= 1;
-    return elem;
-}
-
-clear :: (sa : *Static_Array) {
-    sa.count = 0;
-}
-
-to_array :: (sa : *Static_Array) -> []sa.T {
-    array : []sa.T;
-    array.count = sa.count;
-    array.data = sa.array.data;
-    return array;
-}
test/arrays.shd — new file, +5

@@ -0,0 +1,5 @@
+vertex main :: () -> float4 @position {
+    arr : [16].float4;
+    arr[0] = float4(1,1,1);
+    return arr[0];
+}
test/lex/arrays.golden — new file, +40

@@ -0,0 +1,40 @@
+{kind = TOKEN_VERTEX; ; index = 0 ; length = 6 line = 1 ; column = 0 ; value ='vertex'; }
+{kind = TOKEN_IDENTIFIER; ; index = 7 ; length = 4 line = 1 ; column = 7 ; value ='main'; }
+{kind = TOKEN_DOUBLECOLON; ; index = 12 ; length = 2 line = 1 ; column = 12 ; value ='::'; }
+{kind = TOKEN_LEFTPAREN; ; index = 15 ; length = 1 line = 1 ; column = 15 ; value ='('; }
+{kind = TOKEN_RIGHTPAREN; ; index = 16 ; length = 1 line = 1 ; column = 16 ; value =')'; }
+{kind = TOKEN_ARROW; ; index = 18 ; length = 2 line = 1 ; column = 18 ; value ='->'; }
+{kind = TOKEN_IDENTIFIER; ; index = 21 ; length = 6 line = 1 ; column = 21 ; value ='float4'; }
+{kind = TOKEN_AT; ; index = 28 ; length = 1 line = 1 ; column = 28 ; value ='@'; }
+{kind = TOKEN_IDENTIFIER; ; index = 29 ; length = 8 line = 1 ; column = 29 ; value ='position'; }
+{kind = TOKEN_LEFTBRACE; ; index = 38 ; length = 1 line = 1 ; column = 38 ; value ='{'; }
+{kind = TOKEN_IDENTIFIER; ; index = 42 ; length = 3 line = 2 ; column = 0 ; value ='arr'; }
+{kind = TOKEN_COLON; ; index = 46 ; length = 1 line = 2 ; column = 4 ; value =':'; }
+{kind = TOKEN_LEFTBRACKET; ; index = 48 ; length = 1 line = 2 ; column = 6 ; value ='['; }
+{kind = TOKEN_INTLITERAL; ; index = 49 ; length = 2 line = 2 ; column = 7 ; value ='16'; }
+{kind = TOKEN_RIGHTBRACKET; ; index = 51 ; length = 1 line = 2 ; column = 9 ; value =']'; }
+{kind = TOKEN_DOT; ; index = 52 ; length = 1 line = 2 ; column = 10 ; value ='.'; }
+{kind = TOKEN_IDENTIFIER; ; index = 53 ; length = 6 line = 2 ; column = 11 ; value ='float4'; }
+{kind = TOKEN_SEMICOLON; ; index = 59 ; length = 1 line = 2 ; column = 17 ; value =';'; }
+{kind = TOKEN_IDENTIFIER; ; index = 63 ; length = 3 line = 3 ; column = 0 ; value ='arr'; }
+{kind = TOKEN_LEFTBRACKET; ; index = 66 ; length = 1 line = 3 ; column = 3 ; value ='['; }
+{kind = TOKEN_INTLITERAL; ; index = 67 ; length = 1 line = 3 ; column = 4 ; value ='0'; }
+{kind = TOKEN_RIGHTBRACKET; ; index = 68 ; length = 1 line = 3 ; column = 5 ; value =']'; }
+{kind = TOKEN_ASSIGN; ; index = 70 ; length = 1 line = 3 ; column = 7 ; value ='='; }
+{kind = TOKEN_IDENTIFIER; ; index = 72 ; length = 6 line = 3 ; column = 9 ; value ='float4'; }
+{kind = TOKEN_LEFTPAREN; ; index = 78 ; length = 1 line = 3 ; column = 15 ; value ='('; }
+{kind = TOKEN_INTLITERAL; ; index = 79 ; length = 1 line = 3 ; column = 16 ; value ='1'; }
+{kind = TOKEN_COMMA; ; index = 80 ; length = 1 line = 3 ; column = 17 ; value =','; }
+{kind = TOKEN_INTLITERAL; ; index = 81 ; length = 1 line = 3 ; column = 18 ; value ='1'; }
+{kind = TOKEN_COMMA; ; index = 82 ; length = 1 line = 3 ; column = 19 ; value =','; }
+{kind = TOKEN_INTLITERAL; ; index = 83 ; length = 1 line = 3 ; column = 20 ; value ='1'; }
+{kind = TOKEN_RIGHTPAREN; ; index = 84 ; length = 1 line = 3 ; column = 21 ; value =')'; }
+{kind = TOKEN_SEMICOLON; ; index = 85 ; length = 1 line = 3 ; column = 22 ; value =';'; }
+{kind = TOKEN_RETURN; ; index = 89 ; length = 6 line = 4 ; column = 0 ; value ='return'; }
+{kind = TOKEN_IDENTIFIER; ; index = 96 ; length = 3 line = 4 ; column = 7 ; value ='arr'; }
+{kind = TOKEN_LEFTBRACKET; ; index = 99 ; length = 1 line = 4 ; column = 10 ; value ='['; }
+{kind = TOKEN_INTLITERAL; ; index = 100 ; length = 1 line = 4 ; column = 11 ; value ='0'; }
+{kind = TOKEN_RIGHTBRACKET; ; index = 101 ; length = 1 line = 4 ; column = 12 ; value =']'; }
+{kind = TOKEN_SEMICOLON; ; index = 102 ; length = 1 line = 4 ; column = 13 ; value =';'; }
+{kind = TOKEN_RIGHTBRACE; ; index = 105 ; length = 1 line = 5 ; column = 0 ; value ='}'; }
+{kind = TOKEN_EOF; ; index = 108 ; length = 0 line = 6 ; column = 0 ; value =''; }
test/parse/arrays.golden — new file, +6

@@ -0,0 +1,6 @@
+(program
+    (fun vertex vs_main -> float4 (@position)
+    []
+    (:= arr [16].float4)
+    (= arr[0] (float4 1 1 1))
+    (return arr[0])))