Broke builtins.
Ink.jai (2 changed lines)

@@ -232,7 +232,7 @@ run_lexer_test :: (file_path : string, result : *Compile_Result, output_type : O
         result_data.type = .Failed;
         result_text = report_messages(result.messages);
     } else {
-        result_text = pretty_print_tokens(result.tokens.tokens, *temp);
+        result_text = pretty_print_tokens(result.tokens, *temp);
     }
 
     if output_type & .StdOut {

Lexing.jai (87 changed lines)
@@ -5,17 +5,11 @@ Lexer :: struct {
     current_line : int;
     current_column : int;
 
-    result : Lexing_Result;
+    result : *Compile_Result;
 
     path : string;
 }
 
-Lexing_Result :: struct {
-    tokens : [..]Token;
-    had_error : bool;
-    messages : [..]Compiler_Message;
-}
-
 Token_Kind :: enum {
     TOKEN_FLOATLITERAL;
     TOKEN_INTLITERAL;
@@ -139,6 +133,8 @@ Token :: struct {
     index : int;
 
     error : string;
+
+    builtin : bool; // @Incomplete: This is kind of a bad idea, but let's just do it for now...
 }
 
 Source_Range :: struct {
@@ -272,6 +268,26 @@ error_token :: (lexer : *Lexer, message : string) -> *Token {
     return token;
 }
 
+// unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {
+//     builder : String_Builder;
+//     init_string_builder(*builder,, temp);
+
+//     print_to_builder(*builder, "Unable to open file '%' for reading\n\n", path);
+
+//     location := generate_source_location_from_token(state, token);
+
+//     indent(*builder, 1);
+//     cyan(*builder);
+//     print_to_builder(*builder, "%\n", print_from_source_location(location));
+//     indent(*builder, 1);
+
+//     loc := location.begin;
+//     print_token_pointer(*builder, loc);
+
+//     final_message := builder_to_string(*builder);
+//     record_error(state, token, final_message, false);
+// }
+
 record_error :: (lexer : *Lexer, message : string) {
     error : Compiler_Message;
     error.message_kind = .Error;
@@ -346,14 +362,41 @@ new_token :: (lexer : *Lexer, kind : Token_Kind) -> *Token {
 
 make_directive :: (lexer : *Lexer) -> *Token {
     lexer.start += 1;
-    return make_identifier(lexer, .TOKEN_DIRECTIVE);
+    ident := make_identifier(lexer, .TOKEN_DIRECTIVE);
+    if ident.ident_value == "load" {
+        path_tok := scan_next_token(lexer);
+        path := path_tok.string_value;
+        result : Compile_Result;
+        result.allocator = lexer.result.allocator;
+        result.environment = lexer.result.environment;
+
+        result.file = make_file(*result, path);
+
+        if result.file.source.count == 0 {
+            // unable_to_open_file(lexer, path, path_tok);
+            record_error(lexer, tprint("Unable to open file '%' for reading\n", path));
+            return error_token(lexer, tprint("Unable to open file '%' for reading\n", path));
+        }
+
+        lex(*result);
+
+        result.tokens.count -= 1; // @Note: remove TOKEN_EOF
+        lexer.result.tokens.count -= 2;
+        array_resize(*lexer.result.tokens, lexer.result.tokens.count + result.tokens.count);
+
+        for tok : result.tokens {
+            lexer.result.tokens[it_index] = tok;
+        }
+        return scan_next_token(lexer);
+    }
+
+    return ident;
 }
 
 make_string :: (lexer : *Lexer) -> *Token {
     token : *Token = new_token(lexer, .TOKEN_STRING);
 
-    name : string = .{ count = token.length,
-                       data = *lexer.input.data[lexer.start] };
+    name : string = .{ count = token.length - 2,
+                       data = *lexer.input.data[lexer.start + 1] };
     token.string_value = name;
 
     return token;
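Note on the new #load path in make_directive: it lexes the loaded file into a fresh Compile_Result and splices those tokens into the current stream. The splice loop writes through lexer.result.tokens[it_index], i.e. starting at index 0, which overwrites the tokens already lexed instead of appending after them; together with the bare lexer.result.tokens.count -= 2, this is plausibly part of what the commit message admits is broken. A minimal appending splice, as a sketch against the same data structures (old_count is a name introduced here, not in the commit):

    old_count := lexer.result.tokens.count;   // tokens kept so far, after trimming the directive tokens
    array_resize(*lexer.result.tokens, old_count + result.tokens.count);
    for tok : result.tokens {
        lexer.result.tokens[old_count + it_index] = tok;   // append after the existing tokens
    }

The make_string change in the same hunk is what makes path_tok.string_value usable here: the token's string view now skips the opening quote (data + 1) and drops both quotes from the length (count - 2).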
@@ -432,13 +475,13 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
     if c == {
         case #char "\""; {
             c = advance(lexer);
-            lexer.start = lexer.cursor;
+            // lexer.start = lexer.cursor;
             while c != #char "\"" {
                 c = advance(lexer);
             }
-            lexer.cursor -= 1;
+            // lexer.cursor -= 1;
             tok := make_string(lexer);
-            advance(lexer);
+            // advance(lexer);
             return tok;
         }
         case #char "+"; {
@@ -528,28 +571,14 @@ lex :: (result : *Compile_Result) {
     }
 
     lexer : Lexer;
+    lexer.result = result;
 
     init_lexer_from_string(*lexer, result.file.source);
     lexer.path = result.file.path;
     token : *Token = scan_next_token(*lexer);
     while token && token.kind != .TOKEN_EOF {
         token = scan_next_token(*lexer);
     }
 
-    array_copy(*result.tokens.tokens, lexer.result.tokens);
-    result.had_error |= lexer.result.had_error;
-
-    // @Incomplete(nb): Temporary until we figure out a good way of passing this stuff around
-    copy_messages(lexer.result.messages, *result.messages);
-}
-
-lex :: (lexer : *Lexer, allocator : Allocator = context.allocator) -> Lexing_Result {
-    lexer.result.tokens.allocator = allocator;
-    token : *Token = scan_next_token(lexer);
-    while token && token.kind != .TOKEN_EOF {
-        token = scan_next_token(lexer);
-    }
-
-    return lexer.result;
 }
 
 init_lexer_from_string :: (lexer : *Lexer, input : string) {
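Note: with Lexing_Result gone, the lexer appends tokens and messages directly into the shared Compile_Result, so lex() no longer copies anything out and the allocator-taking overload has nothing left to return. A minimal driving sketch under that assumption (the path is made up):

    result : Compile_Result;
    result.file = make_file(*result, "example.ink");   // hypothetical input file
    lex(*result);                                      // fills result.tokens and result.messages in place
    if result.had_error {                              // assuming had_error is still maintained on Compile_Result
        print("%\n", report_messages(result.messages));
    }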

Parsing.jai (85 changed lines)
@@ -368,16 +368,30 @@ advance_to_sync_point :: (parse_state : *Parse_State) {
 ////////////////////////////
 //@nb - Base parsing functions
 
-make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
+make_node :: (nodes : *[..]AST_Node, kind : AST_Kind, allocator : Allocator) -> *AST_Node {
     node : AST_Node;
 
     node.kind = kind;
-    node.children.allocator = parse_state.result.allocator;
-    array_add(*parse_state.result.nodes, node);
+    node.children.allocator = allocator;
+    array_add(nodes, node);
 
-    return *parse_state.result.nodes[parse_state.result.nodes.count - 1];
+    return *(nodes.*[nodes.count - 1]);
 }
 
+make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
+    return make_node(*parse_state.result.nodes, kind, parse_state.result.allocator);
+}
+
+// new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
+//     node := make_node(parse_state, kind);
+//     node.builtin = true;
+//     return node;
+// }
+
+// new_builtin_struct_node :: (nodes : *[..]AST_Node, name : string, member_names : []string, allocator : Allocator) -> *AST_Node {
+
+// }
+
 add_child :: (node : *AST_Node, child : *AST_Node) {
     child.parent = node;
     array_add(*node.children, child);
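Note: make_node is split into a core overload that appends to any node array plus a thin wrapper with the old signature, so builtin nodes could later be built outside a Parse_State (the commented stubs sketch that direction). One caveat worth keeping in mind: the returned pointer aims into a resizable array, so it is only valid until the next array_add reallocates; that was already true before, it is just generalized here. A hypothetical helper in the spirit of the commented-out stub (not committed code; assumes AST_Node carries the builtin flag the stub implies):

    new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind, allocator : Allocator) -> *AST_Node {
        node := make_node(nodes, kind, allocator);
        node.builtin = true;    // assumed flag, implied by the stub above
        return node;
    }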
@@ -408,10 +422,10 @@ advance :: (parse_state : *Parse_State) {
     parse_state.previous = parse_state.current;
 
     while true {
-        if parse_state.current_token_index >= parse_state.result.tokens.tokens.count {
+        if parse_state.current_token_index >= parse_state.result.tokens.count {
             break;
         }
-        parse_state.current = *parse_state.result.tokens.tokens[parse_state.current_token_index];
+        parse_state.current = *parse_state.result.tokens[parse_state.current_token_index];
         parse_state.current_token_index += 1;
         if parse_state.current.kind != .TOKEN_ERROR break;
 
@@ -444,7 +458,7 @@ check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {
 
 //nb - Checks if the next token is of a certain kind
 check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
-    return parse_state.result.tokens.tokens[parse_state.current_token_index].kind == kind;
+    return parse_state.result.tokens[parse_state.current_token_index].kind == kind;
 }
 
 //nb - Consume a token if
@@ -563,8 +577,8 @@ binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
 }
 
 array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
-    identifier := parse_state.result.tokens.tokens[parse_state.current_token_index - 3];
-    left_bracket := parse_state.result.tokens.tokens[parse_state.current_token_index - 2];
+    identifier := parse_state.result.tokens[parse_state.current_token_index - 3];
+    left_bracket := parse_state.result.tokens[parse_state.current_token_index - 2];
 
     array_access := make_node(parse_state, .Unary);
     array_access.token = left_bracket;
@@ -650,27 +664,44 @@ directive :: (state : *Parse_State) -> *AST_Node {
     advance(state);
 
     if check(state, .TOKEN_STRING) {
-        path := state.current.string_value;
-        advance(state);
+        // path_tok := state.current;
+        // path := path_tok.string_value;
 
-        consume(state, .TOKEN_SEMICOLON, "Expected ';' after #load directive");
-        result : Compile_Result;
-        result.allocator = state.result.allocator;
-        result.environment = state.result.environment;
+        // advance(state);
 
-        result.file = make_file(*result, path);
+        // result : Compile_Result;
+        // result.allocator = state.result.allocator;
+        // result.environment = state.result.environment;
 
-        if result.file.source.count == 0 {
-            unable_to_open_file(state, path, state.previous);
-            advance_to_sync_point(state);
-            return null;
-        }
+        // result.file = make_file(*result, path);
 
-        lex(*result);
+        // if result.file.source.count == 0 {
+        //     unable_to_open_file(state, path, path_tok);
+        //     advance_to_sync_point(state);
+        //     advance(state);
+        //     return null;
+        // }
 
-        for tok : result.tokens.tokens {
-            array_add(*state.result.tokens.tokens, tok);
-        }
+        // consume(state, .TOKEN_SEMICOLON, "Expected ';' after #load directive");
+        // lex(*result);
 
+        // count := state.result.tokens..count;
+        // current_idx := state.current_token_index;
+        // result_count := result.tokens..count;
 
+        // // state.result.tokens..count -= 1;
+        // array_resize(*state.result.tokens., count + result_count - 1);
 
+        // memcpy(*state.result.tokens[current_idx + result_count - 1], *state.result.tokens[current_idx], size_of(Token) * (count - current_idx));
 
+        // for *tok : result.tokens. {
+        //     if tok.kind == .TOKEN_EOF {
+        //         break;
+        //     }
+        //     tok.builtin = true;
+        //     state.result.tokens[it_index] = tok.*;
+        // }
     }
 }
 
@@ -1293,6 +1324,7 @@ const_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
 }
 
 declaration :: (parse_state : *Parse_State) -> *AST_Node {
+    skip_statement := false;
     decl_node : *AST_Node;
     if match(parse_state, .TOKEN_PROPERTIES) {
         decl_node = property_block(parse_state);
@@ -1320,6 +1352,7 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
         decl_node = call(parse_state, null);
     } else if check(parse_state, .TOKEN_DIRECTIVE) {
         decl_node = directive(parse_state);
+        skip_statement = true;
     } else if check(parse_state, .TOKEN_IDENTIFIER) {
         identifier := parse_state.current;
 
@@ -1342,7 +1375,7 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
         decl_node = error;
     }
 
-    if !decl_node {
+    if !decl_node && !skip_statement {
         decl_node = statement(parse_state);
     }
 
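Note: skip_statement exists because directive() can now return null without that being an error. For #load the work happens in the lexer (the tokens are already spliced into the stream), so there is no AST node to produce, and without the flag declaration() would fall through into statement() and try to parse whatever follows as a statement. The intent, restated as a sketch of the flow above:

    } else if check(parse_state, .TOKEN_DIRECTIVE) {
        decl_node = directive(parse_state);   // may legitimately be null for #load
        skip_statement = true;                // so null must not mean "parse a statement instead"
    }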

(next file: name and change count not captured in the page)
@@ -40,7 +40,7 @@ Type_Kind :: enum {
     Array;
 }
 
-Type_Variable_Kind :: enum {
+Source_Kind :: enum {
     Expression;
     Declaration; // struct, properties, function, etc.
 }
@@ -60,7 +60,7 @@ Typenames :: string.[
 // We can just have the built-in types and then we can declare structs, functions, buffers etc. as one time things.
 Type_Variable :: struct {
     type : Type_Kind;
-    kind : Type_Variable_Kind;
+    source_kind : Source_Kind;
     builtin : bool;
 
     name : string;
@@ -704,6 +704,92 @@ new_type_variable :: (checker : *Semantic_Checker) -> *Type_Variable, Type_Varia
     return from_handle(checker, handle), handle;
 }
 
+new_builtin_type_variable :: (checker : *Semantic_Checker, type : Type_Kind, source : Source_Kind, name : string, typename : string = "") -> *Type_Variable, Type_Variable_Handle {
+    tv, handle := new_type_variable(checker);
+
+    tv.name = name;
+    tv.type = type;
+    tv.source_kind = source;
+    tv.builtin = true;
+    tv.typename = typename;
+
+    return tv, handle;
+}
+
+Arg :: struct {
+    name : string;
+    typename : string;
+}
+
+new_builtin_struct :: (checker : *Semantic_Checker, name : string, members : []Arg) -> *Type_Variable, Type_Variable_Handle {
+    tv, handle := new_builtin_type_variable(checker, .Struct, .Declaration, name, name);
+
+    // @Incomplete: Skip for now. This is solely for error reporting?
+    // At least let's not make a big deal out of it for now.
+    // We could report builtin nodes in a special way instead of with an actual source location.
+    // builtin_node := new_builtin_struct_node(checker.result.nodes, members, checker.result.allocator;
+
+    symbol : Defined_Symbol;
+    symbol.name = name;
+    // symbol.source_node = builtin_node;
+    symbol.builtin = true;
+    symbol.type_variable = handle;
+    add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name, symbol);
+
+    scope, scope_handle := push_scope(checker, name, .Struct);
+    tv.scope = scope_handle;
+
+    for member : members {
+        typename : string;
+        kind := lookup_type(checker, checker.current_scope, member.typename, *typename);
+
+        member_var, member_handle := new_builtin_type_variable(checker, kind, .Expression, member.name);
+        member_var.scope = tv.scope;
+
+        member_symbol : Defined_Symbol;
+        member_symbol.name = member.name;
+        member_symbol.type_variable = member_handle;
+        add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, member.name, symbol);
+
+        add_child(checker, handle, member_handle);
+    }
+
+    pop_scope(checker);
+
+    return from_handle(checker, handle), handle;
+}
+
+// new_builtin_function :: (checker : *Semantic_Checker, name : string, args : []Arg) -> *Type_Variable, Type_Variable_Handle {
+//     tv, handle := new_builtin_type_variable(checker, .Function, .Declaration, name);
+
+//     // @Incomplete: Skip for now. This is solely for error reporting?
+//     // At least let's not make a big deal out of it for now.
+//     // We could report builtin nodes in a special way instead of with an actual source location.
+//     // builtin_node := new_builtin_struct_node(checker.result.nodes, members, checker.result.allocator;
+
+//     symbol : Defined_Symbol;
+//     symbol.name = name;
+//     // symbol.source_node = builtin_node;
+//     symbol.builtin = true;
+//     symbol.type_variable = handle;
+//     add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, name, symbol);
+
+//     scope, scope_handle := push_scope(checker, name, .Struct);
+//     tv.scope = scope_handle;
+
+//     for arg : args {
+//         typename : string;
+//         kind := get_type_from_identifier(checker, checker.current_scope, arg, *typename);
+
+//         arg_var, arg_handle := new_builtin_type_variable(checker, kind, .Expression, typename);
+//         add_child(checker, handle, arg_handle);
+//     }
+
+//     pop_scope(checker);
+
+//     return from_handle(checker, handle), handle;
+// }
+
 add_child :: (variable : *Type_Variable, child : Type_Variable_Handle) {
     assert(variable.children.count < Type_Variable.MAX_TYPE_VARIABLE_CHILDREN);
     array_add(*variable.children, child);
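Note (consistent with the commit message): in new_builtin_struct, the member loop fills in member_symbol but then passes the outer symbol to add_symbol_to_scope a second time, so member names are presumably registered against the wrong symbol. The likely intended call, as a one-line sketch:

    add_symbol_to_scope(checker.state, *checker.result.scope_stack, checker.current_scope, member.name, member_symbol);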
@@ -846,9 +932,7 @@ proper_type_to_string :: (variables : []Type_Variable, var : Type_Variable, allo
     return "______not proper type______";
 }
 
-get_type_from_identifier :: (checker : *Semantic_Checker, scope : Scope_Handle, node : *AST_Node, typename : *string = null) -> Type_Kind {
-    type_string := node.token.ident_value;
-
+lookup_type :: (checker : *Semantic_Checker, scope : Scope_Handle, type_string : string, typename : *string = null) -> Type_Kind {
     if type_string == {
         case Typenames[Type_Kind.Int]; return .Int;
         case Typenames[Type_Kind.Half]; return .Half;
@@ -874,6 +958,11 @@ get_type_from_identifier :: (checker : *Semantic_Checker, scope : Scope_Handle,
     return .Invalid;
 }
 
+lookup_type :: (checker : *Semantic_Checker, scope : Scope_Handle, node : *AST_Node, typename : *string = null) -> Type_Kind {
+    type_string := node.token.ident_value;
+    return lookup_type(checker, scope, type_string, typename);
+}
+
 check_block :: (checker : *Semantic_Checker, node : *AST_Node) {
     for child : node.children {
         check_node(checker, child);
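Note: the old get_type_from_identifier becomes a string-based lookup_type plus a node-based overload that just extracts token.ident_value and forwards. The string form is what the builtin path needs, since Arg.typename is a plain string with no AST node behind it. Hypothetical usage mirroring the new_builtin_struct loop:

    typename : string;
    kind := lookup_type(checker, checker.current_scope, "float", *typename);   // "float" stands in for member.typename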
@@ -883,7 +972,7 @@ check_block :: (checker : *Semantic_Checker, node : *AST_Node) {
 declare_struct :: (checker : *Semantic_Checker, node : *AST_Node, name : string) -> Type_Variable_Handle {
     variable, handle := new_type_variable(checker);
     variable.type = .Struct;
-    variable.kind = .Declaration;
+    variable.source_kind = .Declaration;
     variable.name = name;
     variable.source_node = node;
     variable.typename = name;
@@ -975,7 +1064,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
 
     variable, handle := new_type_variable(checker);
     variable.type = .Function;
-    variable.kind = .Declaration;
+    variable.source_kind = .Declaration;
     variable.name = node.name;
     variable.source_node = node;
     variable.builtin = builtin;
@@ -1022,7 +1111,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
     node_child := field_list.children[i];
 
     typename : string;
-    arg_type := get_type_from_identifier(checker, checker.current_scope, node_child, *typename);
+    arg_type := lookup_type(checker, checker.current_scope, node_child, *typename);
     other_arg := from_handle(checker, arg);
 
     if arg_type != other_arg.type {
@@ -1082,7 +1171,7 @@ declare_function :: (checker : *Semantic_Checker, node : *AST_Node, builtin : bo
 
     if builtin && node.token.ident_value.count > 0 {
         return_var, return_handle := new_type_variable(checker);
-        return_var.type = get_type_from_identifier(checker, checker.current_scope, node, *return_var.typename);
+        return_var.type = lookup_type(checker, checker.current_scope, node, *return_var.typename);
         from_handle(checker, handle).return_type_variable= return_handle;
     }
 
@@ -1202,7 +1291,7 @@ check_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_
     variable, handle := new_type_variable(checker);
     variable.name = node.name;
     typename : string;
-    variable.type = get_type_from_identifier(checker, checker.current_scope, node, *typename);
+    variable.type = lookup_type(checker, checker.current_scope, node, *typename);
 
     variable.is_array = node.array_field;
 
@@ -1214,12 +1303,12 @@ check_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_
         }
     }
 
-    if variable.kind == .Declaration && variable.type == .Sampler {
+    if variable.source_kind == .Declaration && variable.type == .Sampler {
         variable.resource_index = checker.current_sampler_index;
         checker.current_sampler_index += 1;
     }
 
-    if variable.kind == .Declaration && variable.type == .Texture2D {
+    if variable.source_kind == .Declaration && variable.type == .Texture2D {
         variable.resource_index = checker.current_texture_index;
         checker.current_texture_index += 1;
     }
@@ -1250,7 +1339,7 @@ check_field :: (checker : *Semantic_Checker, node : *AST_Node) -> Type_Variable_
     }
 
     if node.token.ident_value.count > 0 {
-        variable.type = get_type_from_identifier(checker, checker.current_scope, node);
+        variable.type = lookup_type(checker, checker.current_scope, node);
     }
 
     if node.children.count > 0 {
@@ -1687,17 +1776,16 @@ types_compatible :: (checker : *Semantic_Checker, lhs : Type_Variable_Handle, rh
     return false;
 }
 
-// add_type :: (checker : *Semantic_Checker, kind : Type_Kind, members : ..Type_Kind) {
-
-// }
+add_builtins_new :: (checker : *Semantic_Checker) {
+    checker.state = .Adding_Builtins;
+    float_name := Typenames[Type_Kind.Float];
+    // new_builtin_struct(checker, "float2", .[.{"x", float_name}, .{"y", float_name}]);
+    // new_builtin_struct(checker, "float3", .[.{"x", float_name}, .{"y", float_name}, .{"z", float_name}]);
+    // new_builtin_struct(checker, "float4", .[.{"x", float_name}, .{"y", float_name}, .{"z", float_name}, .{"w", float_name}]);
 
-// add_builtins :: (checler : *Semantic_Checker) {
-// //~ Types
-// add_type(checker, "float2", .Struct, .Float, .Float);
-// add_type(checker, "float3", .Struct, .Float, .Float, .Float);
-
-// //~ Functions
-// }
+    checker.state = .Type_Checking;
+}
 
 add_builtins :: (checker : *Semantic_Checker) {
     source_location := #location().fully_pathed_filename;
@@ -1734,27 +1822,6 @@ add_builtins :: (checker : *Semantic_Checker) {
     parse(checker.result);
     type_check(checker, checker.result.root);
 
-    // lex_result := lex(*lexer,, *temp);
-    // if lex_result.had_error {
-    //     print("%\n", report_messages(lex_result.messages));
-    //     return;
-    // }
-
-    // parse_state : Parse_State;
-    // init_parse_state(*parse_state, lex_result.tokens, lexer.path);
-
-    // parse_result := parse(*parse_state);
-    // if parse_result.had_error {
-    //     print("%\n", report_messages(parse_result.messages));
-    //     return;
-    // }
-
-    // type_check(checker, parse_result.root);
-    // if checker.had_error {
-    //     print("%\n", report_messages(checker.messages));
-    //     return;
-    // }
-
     for *type_var : checker.result.type_variables {
         type_var.builtin = true;
     }
@@ -1777,18 +1844,18 @@ check :: (result : *Compile_Result) {
     checker.current_sampler_index = 0;
     checker.current_texture_index = 0;
     checker.result = result;
 
     file := result.file;
     root := result.root;
     array_reserve(*checker.messages, 32);
 
+    result.root = null;
+    add_builtins_new(*checker);
+    add_builtins(*checker);
+
     init_semantic_checker(*checker, result.root, result.file.path);
 
-    // Actually, these are not built-ins, they should be a core lib you import.
-    // So if we add imports, we can just add this file.
-    // Maybe we should just do #load
-    add_builtins(*checker);
     checker.result.file = file;
 
     result.root = root;
 
     type_check(*checker, result.root);
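Note: check() now stashes file and root, nulls result.root, runs the builtin registration, and restores the stash before type_check. Two things look suspect here, which the commit message ("Broke builtins.") makes plausible: init_semantic_checker is called while result.root is still null, and both add_builtins_new and the old add_builtins run. A sketch of what the ordering presumably wants to be (not the committed code):

    root := result.root;
    result.root = null;                                                // keep builtin parsing off the user AST
    add_builtins_new(*checker);
    result.root = root;                                                // restore first...
    init_semantic_checker(*checker, result.root, result.file.path);    // ...so the checker sees the real root
    type_check(*checker, result.root);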
@@ -1927,7 +1994,7 @@ pretty_print_scope :: (current_scope : Scope_Handle, scope_stack : Scope_Stack,
     case .CBuffer; #through;
     case .Properties; #through;
     case .Struct; {
-        if type_variable.typename.count > 0 && type_variable.kind != .Declaration {
+        if type_variable.typename.count > 0 && type_variable.source_kind != .Declaration {
             indent(builder, indentation + 1);
             print_key(*scope_stack, current_scope, builder, key);
             print_type_variable(builder, variables, type_variable);
@@ -1953,7 +2020,7 @@ pretty_print_scope :: (current_scope : Scope_Handle, scope_stack : Scope_Stack,
     case .CBuffer; #through;
     case .Properties; #through;
     case .Struct; {
-        if type_variable.typename.count > 0 && type_variable.kind != .Declaration {
+        if type_variable.typename.count > 0 && type_variable.source_kind != .Declaration {
             indent(builder, indentation + 1);
             print_key(*scope_stack, current_scope, builder, key);
             print_to_builder(builder, "%\n", type_variable.typename);

(next file: name and change count not captured in the page)
@@ -141,16 +141,12 @@ Input_File :: struct {
     path : string;
 }
 
-Token_Stream :: struct {
-    tokens : [..]Token;
-}
-
 Compile_Result :: struct {
     file : Input_File;
 
     environment : Environment;
 
-    tokens : Token_Stream;
+    tokens : [..]Token;
     root : *AST_Node;
     nodes : [..]AST_Node;
 
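Note: dropping the Token_Stream wrapper is what drives all the mechanical tokens.tokens to tokens renames in Ink.jai and Parsing.jai; the dynamic array now lives directly on Compile_Result:

    // before: result.tokens.tokens[i]   (through the Token_Stream wrapper)
    // after:  result.tokens[i]          ([..]Token directly on Compile_Result)
    count := result.tokens.count;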

(next file: name and change count not captured in the page)
@@ -1,4 +1,4 @@
-#load "../builtins.ink";
+#load "some_file.ink";
 
 vertex main :: () {
     v2 : float2 = float2(2.0, 2.0);