1514 lines
45 KiB
Plaintext
1514 lines
45 KiB
Plaintext
#import "Flat_Pool";
|
|
|
|
// #load "qpwodkqopwkd.jai";
|
|
|
|
/**
|
|
* TODO:
|
|
* if parsing
|
|
* for/while loop parsing
|
|
**/
|
|
|
|
////////////////////////////
|
|
//@nb - Parse_state state
|
|
//nb - Mutable parsing cursor over the token stream owned by ctx.
Parse_State :: struct {
    current : *Token;              // Token currently being examined (not yet consumed).
    previous : *Token;             // Most recently consumed token.

    current_token_index : int;     // Index into ctx.tokens of the token AFTER current (see advance).

    ctx : *Compiler_Context;       // Owning context: token array, node array, messages, had_error flag.
}
|
|
|
|
////////////////////////////
|
|
//@nb - Result and error handling
|
|
//nb - Categories of parse errors. NOTE(review): not referenced by the error
// reporting functions visible in this file — confirm it is used elsewhere.
Parse_Error_Kind :: enum {
    Parse_Error_Type_Missing;         // Declaration lacked a type specifier.
    Parse_Error_Expected_Expression;  // An expression was required but not found.
    Parse_Error_Empty_Block;          // A `{}` block with no statements.
    Parse_Error_Unexpected_Token;     // Token did not fit the grammar at this point.
}
|
|
|
|
////////////////////////////
|
|
//@nb - Parsing helper types
|
|
//nb - Which separator token delimits items in a list being parsed.
Separator_Type :: enum {
    Comma;
    Semicolon;
}
|
|
|
|
//nb - Shader entry-point classification for a function declaration.
Entry_Point_Type :: enum {
    None;    // Ordinary function, not an entry point.
    Vertex;  // Vertex shader entry point.
    Pixel;   // Pixel (fragment) shader entry point.
}
|
|
|
|
////////////////////////////
|
|
//@nb - Expression parsing
|
|
//nb - Binding powers for the Pratt expression parser, weakest first.
// precedence() keeps consuming infix operators while the current token's
// rule precedence is >= the requested level.
Precedence :: enum {
    PREC_NONE;
    PREC_ASSIGNMENT; // =
    PREC_OR; // ||
    PREC_AND; // &&
    PREC_BITWISE; // | & ^
    PREC_EQUALITY; // == !=
    PREC_COMPARISON; // < > <= >=
    PREC_TERM; // + -
    PREC_FACTOR; // * /
    PREC_UNARY; // ! -
    PREC_CALL; // . ()
    PREC_PRIMARY;
}
|
|
|
|
//nb - Handler signature for both prefix and infix parse functions. For a
// prefix handler 'left' is null; for an infix handler it is the already
// parsed left operand.
Parse_Fn :: #type (parse_state: *Parse_State, left : *AST_Node) -> *AST_Node;

//nb - One row of the Pratt table: how to parse a token when it starts an
// expression (prefix), when it follows one (infix), and how tightly the
// infix form binds.
Parse_Rule :: struct {
    prefix : Parse_Fn;
    infix : Parse_Fn;
    precedence : Precedence;
}
|
|
|
|
//nb - Pratt dispatch table, built at compile time (#run) and indexed by
// Token_Kind. Entries not assigned below default to {null, null, PREC_NONE},
// which precedence() treats as "cannot start / continue an expression".
parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
    rules : [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule;
    rules[Token_Kind.TOKEN_LEFTPAREN] = .{grouping, call, .PREC_CALL};
    rules[Token_Kind.TOKEN_RIGHTPAREN] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_LEFTBRACE] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_RIGHTBRACE] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_LEFTBRACKET] = .{null, array_access, .PREC_CALL};
    rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL};
    // rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL};
    rules[Token_Kind.TOKEN_MINUS] = .{unary, binary, .PREC_TERM};
    rules[Token_Kind.TOKEN_PLUS] = .{null, binary, .PREC_TERM};
    rules[Token_Kind.TOKEN_SEMICOLON] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_SLASH] = .{null, binary, .PREC_FACTOR};
    rules[Token_Kind.TOKEN_STAR] = .{null, binary, .PREC_FACTOR};
    // NOTE(review): '!=' sits at PREC_COMPARISON while '==' below is at
    // PREC_EQUALITY — confirm this asymmetry is intentional.
    rules[Token_Kind.TOKEN_ISNOTEQUAL] = .{null, binary, .PREC_COMPARISON};
    // NOTE(review): '=' and the compound assignments are handled as ordinary
    // binary operators at PREC_COMPARISON rather than PREC_ASSIGNMENT —
    // confirm the intended associativity/precedence of assignment.
    rules[Token_Kind.TOKEN_ASSIGN] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_MINUSEQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_PLUSEQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_DIVEQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_TIMESEQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_MODEQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_ISEQUAL] = .{null, binary, .PREC_EQUALITY};
    rules[Token_Kind.TOKEN_GREATER] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_GREATEREQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_LESS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_LESSEQUALS] = .{null, binary, .PREC_COMPARISON};
    rules[Token_Kind.TOKEN_IDENTIFIER] = .{named_variable, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_INTLITERAL] = .{integer, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_FLOATLITERAL] = .{floating, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_ELSE] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_FALSE] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_FOR] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_IF] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_LOGICALOR] = .{null, binary, .PREC_OR};
    rules[Token_Kind.TOKEN_LOGICALAND] = .{null, binary, .PREC_AND};
    rules[Token_Kind.TOKEN_RETURN] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_TRUE] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_WHILE] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_ERROR] = .{null, null, .PREC_NONE};
    rules[Token_Kind.TOKEN_EOF] = .{null, null, .PREC_NONE};

    return rules;
}
|
|
|
|
////////////////////////////
|
|
//@nb - Error handling functions
|
|
|
|
//nb - Record an error and report it immediately to the user.
|
|
//nb - Record an error and report it immediately to the user.
// Builds a Compiler_Message whose source range spans from the start of the
// offending token's line to the next sync point, sets ctx.had_error, and
// appends the message to ctx.messages. The parse position is saved before
// scanning ahead for the range end and restored afterwards, so recording an
// error does not move the cursor.
record_error :: (parse_state : *Parse_State, token : Token, message : string, report_source_location : bool = true) {
    error : Compiler_Message;
    error.message_kind = .Error;
    error.message = message;
    error.path = parse_state.ctx.file.path;

    // Rewind the begin token to column 0 so the whole line is reported.
    source_location : Source_Range;
    source_location.begin = token;
    source_location.begin.column = 0;
    // NOTE(review): column was just zeroed, so this subtracts 0 — the source
    // pointer is NOT moved back to the line start here (cf.
    // generate_source_location_from_token, which adjusts before zeroing).
    source_location.begin.source = source_location.begin.source - source_location.begin.column;
    source_location.main_token = token;

    // Scan ahead to the next sync point to find the range end, then restore.
    snap := snapshot_state(parse_state);
    advance_to_sync_point(parse_state);
    error.report_source_location = report_source_location;

    source_location.end = parse_state.current;
    array_add(*error.source_locations, source_location);

    parse_state.ctx.had_error = true;
    array_add(*parse_state.ctx.messages, error);

    rewind_to_snapshot(parse_state, snap);
}
|
|
|
|
//nb - Build a Source_Range for diagnostics: it starts at column 0 of the
// token's line and ends at the next sync point. The parse position is
// snapshotted before the look-ahead and restored afterwards, so calling this
// never moves the cursor.
generate_source_location_from_token :: (state : *Parse_State, token : Token) -> Source_Range {
    // Widen the token back to the start of its line so the full line prints.
    first : Token = token;
    first.index  -= first.column;
    first.length += first.column;
    first.source -= first.column;
    first.column  = 0;

    result : Source_Range;
    result.begin      = first;
    result.main_token = token;

    // Peek forward to the next sync point for the range end, then rewind.
    saved := snapshot_state(state);
    advance_to_sync_point(state);
    result.end = state.current;
    rewind_to_snapshot(state, saved);

    return result;
}
|
|
|
|
//nb - Report a token that does not fit the grammar. Formats the message plus
// the offending source line (widened to column 0) with a pointer under the
// token, then records it via record_error without re-printing the location.
unexpected_token :: (state : *Parse_State, token : Token, message : string) {
    scratch := get_scratch();
    defer scratch_end(scratch);

    b : String_Builder;
    init_string_builder(*b,, scratch.allocator);

    print_to_builder(*b, "%\n\n", message);

    // Expand the token back to the line start; the range ends at the token
    // itself (no look-ahead to a sync point here).
    where : Source_Range;
    where.begin = token;
    where.begin.index  -= where.begin.column;
    where.begin.source -= where.begin.column;
    where.begin.length += where.begin.column;
    where.begin.column  = 0;

    where.main_token = token;
    where.end = token;

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));

    indent(*b, 1);
    print_token_pointer(*b, token);

    final_message := builder_to_string(*b,, context.allocator);
    record_error(state, token, final_message, false);
}
|
|
|
|
//nb - Diagnostic for an 'else if' that has no preceding 'if'. Points at the
// previously consumed token and records the formatted message.
else_if_without_if :: (state : *Parse_State) {
    b : String_Builder;
    init_string_builder(*b,, temp);

    append(*b, "'else if' without 'if'\n");

    bad_token := state.previous;
    where : Source_Range = generate_source_location_from_token(state, bad_token);

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));

    indent(*b, 1);
    print_token_pointer(*b, bad_token);
    white(*b);

    record_error(state, bad_token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Diagnostic for an 'else' that has no preceding 'if'. Points at the
// previously consumed token and records the formatted message.
else_without_if :: (state : *Parse_State) {
    b : String_Builder;
    init_string_builder(*b,, temp);

    append(*b, "'else' without 'if'\n");

    bad_token := state.previous;
    where : Source_Range = generate_source_location_from_token(state, bad_token);

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));

    indent(*b, 1);
    print_token_pointer(*b, bad_token);
    white(*b);

    record_error(state, bad_token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Generic "could not parse a statement here" diagnostic, with an optional
// extra message appended after the fixed prefix.
unable_to_parse_statement :: (state : *Parse_State, token : Token, message : string = "") {
    b : String_Builder;
    init_string_builder(*b,, temp);

    print_to_builder(*b, "Unable to parse statement here. %\n", message);

    where : Source_Range = generate_source_location_from_token(state, token);

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));

    indent(*b, 1);
    print_token_pointer(*b, token);

    record_error(state, token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Diagnostic for a position where an expression was required but none
// could be parsed. Shows the source line and a pointer under 'token'.
expected_expression :: (state : *Parse_State, token : Token, message : string) {
    b : String_Builder;
    init_string_builder(*b,, temp);

    print_to_builder(*b, "%\n", message);

    where : Source_Range = generate_source_location_from_token(state, token);

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));

    indent(*b, 1);
    print_token_pointer(*b, token);

    record_error(state, token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Diagnostic for a declaration that lacks a type specifier. The caret is
// shifted to just past the reported token so it lands where the type should
// have appeared.
missing_type_specifier :: (state : *Parse_State, token : Token, message : string) {
    builder : String_Builder;
    init_string_builder(*builder,, temp);

    print_to_builder(*builder, "%\n", message);

    location := generate_source_location_from_token(state, token);

    indent(*builder, 1);
    cyan(*builder);
    print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
    indent(*builder, 1);

    // Shift the caret past the token text. NOTE(review): the '+ 2' presumably
    // skips the ': ' after the name — confirm against the renderer.
    loc := location.begin;
    increment := location.begin.length + 2;
    loc.source += increment;
    loc.index += increment;
    loc.column += increment;
    print_token_pointer(*builder, loc);

    final_message := builder_to_string(*builder);
    record_error(state, token, final_message, false);
}
|
|
|
|
//nb - Diagnostic for an empty '{}' block. The caret is placed at the start of
// the reported token's line.
empty_block :: (state : *Parse_State, token : Token, message : string) {
    b : String_Builder;
    init_string_builder(*b,, temp);

    print_to_builder(*b, "%\n", message);

    where := generate_source_location_from_token(state, token);

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));
    indent(*b, 1);

    print_token_pointer(*b, where.begin);

    record_error(state, token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Diagnostic for a file (e.g. an include/load target) that could not be
// opened for reading; 'token' is the token that referenced the file.
unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {
    b : String_Builder;
    init_string_builder(*b,, temp);

    print_to_builder(*b, "Unable to open file '%' for reading\n\n", path);

    where := generate_source_location_from_token(state, token);

    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));
    indent(*b, 1);

    print_token_pointer(*b, where.begin);

    record_error(state, token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Diagnostic for a shader entry point declared without a return value;
// 'token' is the entry point's identifier token.
entry_point_requires_return_value :: (state : *Parse_State, token : Token) {
    b : String_Builder;
    init_string_builder(*b,, temp);

    print_to_builder(*b, "Entry point '%' requires return value\n\n", token.ident_value);

    where := generate_source_location_from_token(state, token);
    indent(*b, 1);
    cyan(*b);
    print_to_builder(*b, "%\n", print_from_source_location(state.ctx, where));
    indent(*b, 1);

    print_token_pointer(*b, where.begin);

    record_error(state, token, builder_to_string(*b), false);
}
|
|
|
|
//nb - Create an .Error AST node carrying a copy of 'message', used as a
// placeholder result when expression parsing fails.
error_node :: (parse_state : *Parse_State, message : string) -> *AST_Node {
    n := make_node(parse_state, .Error);
    n.name = copy_string(message);
    return n;
}
|
|
|
|
//nb - Advance to the next sync point.
|
|
// A sync point is the next token that ends a statement or starts/ends a block.
|
|
//nb - Advance to the next sync point.
// A sync point is the next token that ends a statement (';') or starts/ends a
// block ('{', '}'), or end of file. The sync token itself is NOT consumed.
advance_to_sync_point :: (parse_state : *Parse_State) {
    while true {
        kind := parse_state.current.kind;
        at_sync := kind == .TOKEN_SEMICOLON || kind == .TOKEN_RIGHTBRACE ||
                   kind == .TOKEN_LEFTBRACE || kind == .TOKEN_EOF;
        if at_sync  break;
        advance(parse_state);
    }
}
|
|
////////////////////////////
|
|
|
|
|
|
////////////////////////////
|
|
//@nb - Base parsing functions
|
|
|
|
//nb - Append a fresh node of the given kind to 'nodes' and return a pointer
// to the stored element.
// NOTE(review): the returned pointer aims into a resizable [..] array; any
// later array_add that reallocates the backing storage invalidates every
// pointer previously returned from here (and callers do hold such pointers
// across further make_node calls, e.g. array_access/dot). Confirm the array's
// allocation strategy (e.g. a pool / reserved capacity) makes this safe.
make_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
    node : AST_Node;

    node.kind = kind;
    array_add(nodes, node);

    return *(nodes.*[nodes.count - 1]);
}
|
|
|
|
//nb - Convenience overload: allocate a node in the parse state's context.
make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
    return make_node(*parse_state.ctx.nodes, kind);
}
|
|
|
|
//nb - Synthesize a compiler-generated ("builtin") token, append it to
// 'tokens', and return a pointer to the stored copy.
// 'col' and 'line' are a virtual text cursor shared across consecutive
// builtin tokens: the token records the cursor's current column, then the
// cursor is advanced over 'text' (a newline resets the column and bumps the
// line) so the next builtin token gets a consistent position.
make_builtin_token :: (tokens : *[..]Token, kind : Token_Kind, text : string, col : *int, line : *int) -> *Token {
    tok : Token;
    tok.kind = kind;

    tok.column = col.*;

    // Advance the virtual cursor across the token text.
    for c : text {
        if c == #char "\n" {
            line.* += 1;   // Fixed: was 'line.* ++ 1;', which is not a statement.
            col.* = 0;
        } else {
            col.* += 1;
        }
    }

    // NOTE(review): 'index' here is the token's position in the token array,
    // whereas for lexed tokens it is used with column arithmetic elsewhere —
    // confirm the two uses agree.
    tok.index = tokens.count;
    tok.length = text.count;
    tok.builtin = true;
    tok.source = text.data;
    tok.ident_value = text;

    array_add(tokens, tok);

    return *(tokens.*)[tokens.count - 1];
}
|
|
|
|
//nb - Build a compiler-provided struct declaration (node + synthetic tokens)
// as if it had been written in source: "name :: struct { member : type; ... }".
// Each member becomes a .Field child of a .FieldList child of the .Struct
// node, and every piece of text gets a builtin token so diagnostics can
// render the declaration.
new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg) -> *AST_Node {
    sc := get_scratch(context.allocator);
    defer scratch_end(sc);
    node := make_node(*ctx.nodes, .Struct);

    source_location : Source_Range;

    // Virtual cursor threaded through all the synthetic tokens below.
    col := 0;
    line := 0;

    tok_index := ctx.tokens.count;

    ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
    ident_token.ident_value = name;
    source_location.begin = ident_token;

    make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
    make_builtin_token(*ctx.tokens, .TOKEN_STRUCT, "struct ", *col, *line);
    make_builtin_token(*ctx.tokens, .TOKEN_LEFTBRACE, "{\n\t", *col, *line);
    line += 1;
    col = 0;

    field_list := make_node(*ctx.nodes, .FieldList);
    add_child(node, field_list);

    // One "name : type;" field per member, each on its own virtual line.
    for member : members {
        field := make_node(*ctx.nodes, .Field);
        field_source_loc : Source_Range;

        field_ident := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.name, *col, *line);
        field_source_loc.begin = field_ident;
        field.token = field_ident;
        field.name = member.name;

        make_builtin_token(*ctx.tokens, .TOKEN_COLON, ": ", *col, *line);
        make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
        semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);
        col = 0;
        line += 1;

        field_source_loc.end = semicolon_tok;
        field.source_location = field_source_loc;

        add_child(field_list, field);
    }

    brace_token := make_builtin_token(*ctx.tokens, .TOKEN_RIGHTBRACE, "\n}", *col, *line);

    source_location.end = brace_token;

    node.source_location = source_location;

    return node;
}
|
|
|
|
//nb - Build a compiler-provided function declaration (node + synthetic tokens)
// shaped like "name :: (type, type, ...);". Parameters are recorded by type
// only (a .Field per member under a .FieldList).
// NOTE(review): 'return_var' is accepted but never used, and field.name is
// not set here (unlike new_builtin_struct_node) — confirm both are intended.
new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg) -> *AST_Node {
    sc := get_scratch(context.allocator);
    defer scratch_end(sc);

    node := make_node(*ctx.nodes, .Function);

    source_location : Source_Range;

    // Virtual cursor threaded through all the synthetic tokens below.
    col := 0;
    line := 0;

    tok_index := ctx.tokens.count;

    ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
    source_location.begin = ident_token;

    make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
    make_builtin_token(*ctx.tokens, .TOKEN_LEFTPAREN, "(", *col, *line);
    field_list := make_node(*ctx.nodes, .FieldList);
    add_child(node, field_list);

    // One type token per parameter, comma-separated.
    for member : members {
        field := make_node(*ctx.nodes, .Field);
        field_source_loc : Source_Range;

        type_tok := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
        field_source_loc.begin = type_tok;
        field.token = type_tok;

        // No comma after the final parameter.
        if it_index < members.count - 1 {
            make_builtin_token(*ctx.tokens, .TOKEN_COMMA, ", ", *col, *line);
        }

        field_source_loc.end = type_tok;
        field.source_location = field_source_loc;

        add_child(field_list, field);
    }

    make_builtin_token(*ctx.tokens, .TOKEN_RIGHTPAREN, ")", *col, *line);
    semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);

    source_location.end = semicolon_tok;

    node.source_location = source_location;

    return node;
}
|
|
|
|
//nb - Return the .FieldList child of a struct/function/cbuffer node.
// Relies on the invariant that the field list is always child 0 (established
// by the node constructors in this file).
get_field_list :: (struct_or_func : *AST_Node) -> *AST_Node {
    assert(struct_or_func.kind == .Function || struct_or_func.kind == .Struct || struct_or_func.kind == .CBuffer);
    return struct_or_func.children[0];
}
|
|
|
|
//nb - Attach 'child' to 'node', setting the back-pointer and appending to the
// children array.
add_child :: (node : *AST_Node, child : *AST_Node) {
    child.parent = node;
    array_add(*node.children, child);
}
|
|
|
|
//nb - Saved parse position (mirrors the cursor fields of Parse_State) used to
// rewind after speculative look-ahead, e.g. when computing diagnostic ranges.
Sync_Snapshot :: struct {
    current : *Token;
    previous : *Token;
    current_token_index : int;
}
|
|
|
|
//nb - Capture the current parse position so it can be restored later with
// rewind_to_snapshot.
snapshot_state :: (parse_state : *Parse_State) -> Sync_Snapshot {
    return .{
        parse_state.current,
        parse_state.previous,
        parse_state.current_token_index,
    };
}
|
|
|
|
//nb - Restore a parse position previously captured by snapshot_state.
rewind_to_snapshot :: (parse_state : *Parse_State, snapshot : Sync_Snapshot) {
    parse_state.current             = snapshot.current;
    parse_state.previous            = snapshot.previous;
    parse_state.current_token_index = snapshot.current_token_index;
}
|
|
|
|
//nb - Consume one token: 'previous' becomes the old 'current', and 'current'
// becomes the next token from ctx.tokens. current_token_index is left
// pointing one past the new 'current' (check_next relies on this).
// If the next token is a lexer TOKEN_ERROR, an "unknown token" diagnostic is
// reported and the function returns with 'current' still on the error token
// (record_error rewinds the position it scanned ahead over).
advance :: (parse_state : *Parse_State) {
    parse_state.previous = parse_state.current;

    while true {
        // At the end of the token array 'current' keeps its last value
        // (expected to be TOKEN_EOF).
        if parse_state.current_token_index >= parse_state.ctx.tokens.count {
            break;
        }
        parse_state.current = *parse_state.ctx.tokens[parse_state.current_token_index];
        parse_state.current_token_index += 1;
        if parse_state.current.kind != .TOKEN_ERROR break;

        // Lexer error token: report it and bail out of the advance.
        err := tprint("unknown token \x1b[1;37m'%'\x1b[0m", parse_state.current.string_value);
        unexpected_token(parse_state, parse_state.current, err);
        return;
    }
}
|
|
|
|
//nb - Checks if the current token is of a certain kind and advances if it is
|
|
//nb - Checks if the current token is of a certain kind and advances if it is.
// Returns true exactly when the token was consumed.
match :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
    if check(parse_state, kind) {
        advance(parse_state);
        return true;
    }
    return false;
}
|
|
|
|
//nb - Checks if the current token is of a certain kind
|
|
//nb - Checks if the current token is of a certain kind (does not consume).
check :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
    return parse_state.current.kind == kind;
}
|
|
|
|
//nb - True if the current token matches any of the given kinds (no consume).
check_any :: (parse_state : *Parse_State, kinds : ..Token_Kind) -> bool {
    for kinds {
        if parse_state.current.kind == it  return true;
    }
    return false;
}
|
|
|
|
//nb - Checks if the next token is of a certain kind
|
|
//nb - Checks if the token AFTER 'current' is of a certain kind.
// (current_token_index points one past 'current' — see advance.)
// NOTE(review): no bounds check against ctx.tokens.count; presumably safe
// because the stream always ends in TOKEN_EOF — confirm callers never peek
// past it.
check_next :: (parse_state : *Parse_State, kind : Token_Kind) -> bool {
    return parse_state.ctx.tokens[parse_state.current_token_index].kind == kind;
}
|
|
|
|
//nb - Consume a token if
|
|
//nb - Consume a token of the expected kind, or report 'message' as an error.
// On mismatch the error points at the PREVIOUS token (the last thing parsed
// successfully) and the cursor is left at the next sync point so parsing can
// resynchronize.
consume :: (parse_state : *Parse_State, kind : Token_Kind, message : string) {
    if parse_state.current.kind == kind {
        advance(parse_state);
        return;
    }

    token := parse_state.previous;
    advance_to_sync_point(parse_state);

    unexpected_token(parse_state, token, message);

    // (Removed a trailing 'if current == TOKEN_EOF return;' — it was dead
    // code, as the function returned immediately afterwards regardless.)
}
|
|
|
|
////////////////////////////
|
|
//@nb - Expression parsing
|
|
//nb - Look up the Pratt table row for a token kind.
get_rule :: (kind : Token_Kind) -> *Parse_Rule {
    return *parse_rules[kind];
}
|
|
|
|
//nb - Core Pratt loop: parse an expression whose operators bind at least as
// tightly as 'precedence'. The token starting the expression is consumed
// first; its prefix rule produces the left operand, then infix rules are
// applied while the next token's binding power is >= the requested level.
// Returns an .Error node (or null at EOF) after reporting when no expression
// can be parsed; 'message' is appended to the diagnostic if provided.
precedence :: (parse_state : *Parse_State, precedence : Precedence, message : string = "") -> *AST_Node {
    prev := parse_state.previous;
    advance(parse_state);

    prefix_rule := get_rule(parse_state.previous.kind).prefix;
    if prefix_rule == null {
        // The consumed token cannot start an expression; report against the
        // token BEFORE it so the caret lands where an expression was expected.
        tok_s : string;
        tok_s.data = prev.source;
        tok_s.count = prev.length;
        if message {
            expected_expression(parse_state, prev, tprint("Expected expression after '%'. %", tok_s, message));
        } else {
            expected_expression(parse_state, prev, tprint("Expected expression after '%'.", tok_s));
        }

        return error_node(parse_state, "Expected expression.");
    }

    left := prefix_rule(parse_state, null);

    // Fold infix operators while they bind at least this tightly.
    while precedence <= get_rule(parse_state.current.kind).precedence {
        advance(parse_state);
        if parse_state.current.kind == .TOKEN_EOF {
            tok_s : string;
            tok_s.data = parse_state.previous.source;
            tok_s.count = parse_state.previous.length;
            expected_expression(parse_state, parse_state.current, tprint("Reached end of file. Expected expression after '%'.", tok_s));
            // @Incomplete: Add error node here?
            return null;
        }
        infix_rule := get_rule(parse_state.previous.kind).infix;
        left = infix_rule(parse_state, left);
    }

    return left;
}
|
|
|
|
//nb - Prefix handler for an identifier: either the start of a call (when a
// '(' follows) or a plain variable reference.
named_variable :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    // identifier '(' ... is a function call, not a variable read.
    if check(parse_state, .TOKEN_LEFTPAREN)  return call(parse_state, left);

    v := make_node(parse_state, .Variable);
    v.source_location = generate_source_location_from_token(parse_state, parse_state.previous);
    v.name = parse_state.previous.ident_value;
    return v;
}
|
|
|
|
//nb - Infix handler for binary operators (including '=' and the compound
// assignments, which this table routes through here). Builds a .Binary node
// with 'left' and the right operand (parsed one precedence level tighter,
// giving left associativity) as children; the operator token is stored on
// the node for the kinds listed in the case below.
binary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    op := parse_state.previous.*;
    rule := get_rule(op.kind);

    source_location := generate_source_location_from_token(parse_state, op);
    // source_location : Source_Range;
    // source_location.begin = left.source_location.begin;

    binary_expression := make_node(parse_state, .Binary);

    add_child(binary_expression, left);
    add_child(binary_expression, precedence(parse_state, rule.precedence + 1));

    // Record the operator token for every recognized binary operator kind.
    if op.kind == {
        case .TOKEN_PLUS; #through;
        case .TOKEN_PLUSEQUALS; #through;
        case .TOKEN_MINUSEQUALS; #through;
        case .TOKEN_TIMESEQUALS; #through;
        case .TOKEN_DIVEQUALS; #through;
        case .TOKEN_MINUS; #through;
        case .TOKEN_STAR; #through;
        case .TOKEN_SLASH; #through;
        case .TOKEN_ISEQUAL; #through;
        case .TOKEN_ASSIGN; #through;
        case .TOKEN_ISNOTEQUAL; #through;
        case .TOKEN_LOGICALOR; #through;
        case .TOKEN_LOGICALAND; #through;
        case .TOKEN_LESS; #through;
        case .TOKEN_LESSEQUALS; #through;
        case .TOKEN_GREATER; #through;
        case .TOKEN_GREATEREQUALS;
        {
            binary_expression.token = op;
        }
    }

    // source_location.end = parse_state.previous;
    binary_expression.source_location = source_location;

    return binary_expression;
}
|
|
|
|
//nb - Infix handler for '[': parses "base[index]" into a .Binary node whose
// token is the '[' and whose children are the base ('left') and the index
// expression. If '=' follows, wraps the access in an assignment node, same
// as dot() does for member assignment.
array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    // '[' was just consumed; it is two tokens behind the look-ahead index.
    left_bracket := parse_state.ctx.tokens[parse_state.current_token_index - 2];

    array_access := make_node(parse_state, .Binary);
    array_access.token = left_bracket;
    array_index := expression(parse_state);
    add_child(array_access, left);
    add_child(array_access, array_index);

    consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index.");

    source_location : Source_Range;
    source_location.begin = left.source_location.begin;

    if check(parse_state, .TOKEN_ASSIGN) {
        advance(parse_state);

        node := make_node(parse_state, .Binary);
        node.token = parse_state.previous;
        // Fixed: the assignment target is the element access itself, not the
        // bare base 'left' (which silently dropped the index). Mirrors dot().
        add_child(node, array_access);
        add_child(node, expression(parse_state));

        // Give the access node a source range too (previously unset on this path).
        source_location.end = parse_state.previous;
        array_access.source_location = source_location;
        return node;
    }

    source_location.end = parse_state.previous;
    array_access.source_location = source_location;
    return array_access;
}
|
|
|
|
//nb - Prefix handler for unary operators. Parses the operand one precedence
// level tighter than the operator's own, stores the operator token, and for
// a '[' prefix also consumes the closing ']'.
// NOTE(review): only TOKEN_MINUS is routed here by parse_rules; the
// TOKEN_LEFTBRACKET case looks unreachable from the table — confirm.
unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    op := parse_state.previous.*;
    rule := get_rule(op.kind);

    unary_expression := make_node(parse_state, .Unary);

    add_child(unary_expression, precedence(parse_state, rule.precedence + 1));

    if op.kind == {
        case .TOKEN_MINUS; {
            unary_expression.token = op;
        }
        case .TOKEN_LEFTBRACKET; {
            unary_expression.token = op;
            consume(parse_state, .TOKEN_RIGHTBRACKET, "Expect ']' after array access.");
        }
    }

    return unary_expression;
}
|
|
|
|
//nb - Prefix handler for '(': parse the parenthesized expression and require
// the closing ')'. No extra node is created; the inner expression is returned.
grouping :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    inner := expression(parse_state);
    consume(parse_state, .TOKEN_RIGHTPAREN, "Expect ')' after group expression.");
    return inner;
}
|
|
|
|
//nb - Parse a compiler directive after its introducer. Two forms handled:
//   foreign <name> :: (...)  -> a function declaration flagged foreign;
//   if <expr> { ... } [else ...]  -> an .If_Directive node.
// Returns null for any other directive word.
directive :: (state : *Parse_State) -> *AST_Node {
    if state.current.ident_value == "foreign" {
        advance(state);
        identifier_token := state.current;
        advance(state);
        consume(state, .TOKEN_DOUBLECOLON, "Expect '::' after function name.");
        func := function_declaration(state, identifier_token, .None, false, false);
        func.foreign_declaration = true;
        return func;
    } else if state.current.ident_value == "if" {
        if_directive := make_node(state, .If_Directive);

        // Begin at the directive introducer when available (previous token),
        // otherwise at the 'if' itself.
        source_location : Source_Range;
        if state.previous {
            source_location.begin = state.previous;
        } else {
            source_location.begin = state.current;
        }

        advance(state);

        cond := expression(state);
        add_child(if_directive, cond);

        source_location.end = state.previous;
        // Skip ahead to the '{' of the body before parsing the block.
        advance_to_sync_point(state);

        if_body := block(state);
        add_child(if_directive, if_body);

        if match(state, .TOKEN_ELSE) {
            else_node := else_statement(state);
            add_child(if_directive, else_node);
        }

        if_directive.source_location = source_location;

        return if_directive;
    }

    return null;
}
|
|
|
|
//nb - Parse a function call. On entry 'previous' is the callee identifier and
// 'current' is the '('. Builds a .Call node named after the callee with an
// optional .ArgList child; the source range ends at the next sync point
// (position is snapshotted and restored around the look-ahead).
call :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    call := make_node(parse_state, .Call);
    source_location := generate_source_location_from_token(parse_state, parse_state.previous);
    // source_location : Source_Range;
    // source_location.begin = parse_state.previous;
    // source_location.main_token = parse_state.previous;

    prev := parse_state.previous;
    call.name = prev.ident_value;
    advance(parse_state);  // consume '('
    arg_list := argument_list(parse_state);
    if arg_list {
        add_child(call, arg_list);
    }

    // Extend the range to the next sync point without moving the cursor.
    snapshot := snapshot_state(parse_state);

    advance_to_sync_point(parse_state);
    source_location.end = parse_state.current;

    rewind_to_snapshot(parse_state, snapshot);
    call.source_location = source_location;

    return call;
}
|
|
|
|
//nb - Infix handler for '.': parses member access "left.name" into an .Access
// node with children [left, .Variable(name)]. If an assignment operator
// (= -= += /= %= *=) follows, the access becomes the left child of a .Binary
// assignment node instead.
dot :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    consume(parse_state, .TOKEN_IDENTIFIER, "Expect property name after '.'.");
    identifier := parse_state.previous;

    source_location : Source_Range;
    source_location.begin = left.source_location.begin;
    source_location.main_token = identifier;

    access := make_node(parse_state, .Access);

    variable := make_node(parse_state, .Variable);
    variable.name = identifier.ident_value;

    add_child(access, left);
    add_child(access, variable);

    // Member assignment: wrap the access in an assignment binary node.
    if check_any(parse_state, .TOKEN_ASSIGN, .TOKEN_MINUSEQUALS, .TOKEN_PLUSEQUALS, .TOKEN_DIVEQUALS, .TOKEN_MODEQUALS, .TOKEN_TIMESEQUALS) {
        advance(parse_state);
        access.source_location = generate_source_location_from_token(parse_state, identifier);

        node := make_node(parse_state, .Binary);
        node.token = parse_state.previous;
        add_child(node, access);
        add_child(node, expression(parse_state));
        return node;
    }

    source_location.end = parse_state.current;
    access.source_location = source_location;
    return access;
}
|
|
|
|
//nb - Prefix handler for an integer literal (just consumed, so it is
// 'previous'): produce an .Integer node spanning exactly that token.
integer :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    literal := parse_state.previous;

    node := make_node(parse_state, .Integer);
    node.integer_value               = literal.integer_value;
    node.source_location.begin       = literal;
    node.source_location.end         = literal;
    node.source_location.main_token  = literal;
    return node;
}
|
|
|
|
//nb - Prefix handler for a float literal (just consumed, so it is
// 'previous'): produce a .Float node spanning exactly that token.
floating :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
    literal := parse_state.previous;

    node := make_node(parse_state, .Float);
    node.float_value                 = literal.float_value;
    node.source_location.begin       = literal;
    node.source_location.end         = literal;
    node.source_location.main_token  = literal;
    return node;
}
|
|
|
|
//nb - Parse a full expression: assignment is the loosest level a complete
// expression may contain. 'message' is appended to the diagnostic on failure.
expression :: (parse_state : *Parse_State, message : string = "") -> *AST_Node {
    return precedence(parse_state, .PREC_ASSIGNMENT, message);
}
|
|
|
|
////////////////////////////
|
|
//@nb - Statement parsing functions
|
|
|
|
//nb - Parse "name = <expr>"-style field assignment where the identifier has
// already been consumed by the caller. Produces a .Field node named after the
// identifier with the value expression as its child.
field_assignment :: (parse_state : *Parse_State, identifier_token : *Token) -> *AST_Node {
    node : *AST_Node = make_node(parse_state, .Field);

    identifier := identifier_token.*;
    source_location : Source_Range;
    source_location.begin = identifier;

    source_location.main_token = identifier;
    // NOTE(review): uses string_value here while field_declaration also uses
    // string_value but variables use ident_value — confirm they agree.
    node.name = identifier.string_value;

    add_child(node, expression(parse_state));

    source_location.end = parse_state.previous;
    node.source_location = source_location;

    return node;
}
|
|
|
|
//nb - Non-const field declarations
|
|
//nb - Non-const field declarations
// Parses the part after the field name (already consumed by the caller):
//   name : Type;                 -- plain typed field
//   name : [N].Type;             -- array field (size expr becomes a child)
//   name := <expr>;              -- type inferred from initializer
//   name : Type @hint @hint ...  -- trailing hint annotations
// Produces a .Field node; the type token (if any) is stored on node.token.
field_declaration :: (parse_state : *Parse_State, identifier_token : *Token) -> *AST_Node {
    node : *AST_Node = make_node(parse_state, .Field);

    identifier := identifier_token.*;
    source_location : Source_Range;
    source_location.begin = identifier;

    source_location.main_token = identifier;
    node.name = identifier.string_value;

    consume(parse_state, .TOKEN_COLON, "Expected ':' after field name for declarations.");

    if check(parse_state, .TOKEN_IDENTIFIER) {
        // Plain typed field: record the type token.
        type_identifier := parse_state.current;
        node.token = type_identifier;
        advance(parse_state);
    } else if check(parse_state, .TOKEN_LEFTBRACKET) {
        // Array field: "[size-expr].Type".
        advance(parse_state);
        array_size_expression := expression(parse_state);
        add_child(node, array_size_expression);
        consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected closing ']' in array declaration.");
        consume(parse_state, .TOKEN_DOT, "Expected '.' before array type.");

        type_identifier := parse_state.current;
        node.token = type_identifier;
        advance(parse_state);
        node.array_field = true;
    } else {
        // No type: only valid if an '=' follows (type inferred from value).
        if !check(parse_state, .TOKEN_ASSIGN) {
            internal_error_message(*parse_state.ctx.messages, "Unimplemented error message.", parse_state.ctx.file.path);
            return node;
        }
        // missing_type_specifier(parse_state, identifier_token, "Expected type specifier after field name.");

    }

    if check(parse_state, .TOKEN_AT) {
        // Collect trailing '@...' hint annotations.
        while check(parse_state, .TOKEN_AT) {
            advance(parse_state);
            // @Incomplete(niels): this is a mapping
            if check(parse_state, .TOKEN_IDENTIFIER) {
                array_add(*node.hint_tokens, parse_state.current);
                advance(parse_state);
            } else if check(parse_state, .TOKEN_HINT) {
                array_add(*node.hint_tokens, parse_state.current);
                advance(parse_state);
            } else if check(parse_state, .TOKEN_OPTIONAL) {
                array_add(*node.hint_tokens, parse_state.current);
                advance(parse_state);
            }
        }
    } else if match(parse_state, .TOKEN_ASSIGN) {
        // Initializer expression becomes a child.
        add_child(node, expression(parse_state));
    }

    source_location.end = parse_state.previous;
    node.source_location = source_location;

    return node;
}
|
|
|
|
// Parses a parenthesized, comma-separated argument list for a function
// call. The opening '(' must already have been consumed; this routine
// consumes up to and including the closing ')'.
//
// Returns null for an empty list: the .ArgList node is only created once
// the first argument has parsed, so callers must handle a null result.
argument_list :: (parse_state : *Parse_State) -> *AST_Node {
    node : *AST_Node;

    source_location : Source_Range;

    source_location.begin = parse_state.previous;

    // Widen the begin token back to the start of its line: rewind the
    // index and source pointer by the column and grow the length to
    // match, so the reported range starts at column 0 of the call's line.
    source_location.begin.index -= source_location.begin.column;

    source_location.begin.source -= source_location.begin.column;

    source_location.begin.length += source_location.begin.column;

    source_location.begin.column = 0;

    source_location.main_token = parse_state.current;

    // Temporarily clear had_error so an error introduced while parsing
    // the arguments themselves can be detected (flag restored below).
    error_before := parse_state.ctx.had_error;

    parse_state.ctx.had_error = false;

    while !check(parse_state, .TOKEN_RIGHTPAREN) {

        arg := expression(parse_state);

        if !node {

            node = make_node(parse_state, .ArgList);

        }

        add_child(node, arg);

        if check(parse_state, .TOKEN_RIGHTPAREN) break;

        consume(parse_state, .TOKEN_COMMA, "Expect ',' after function argument.");

        // Bail out rather than looping forever on a malformed argument.
        if parse_state.ctx.had_error {

            break;

        }

    }

    // Merge the saved error flag back in.
    parse_state.ctx.had_error = error_before || parse_state.ctx.had_error;

    consume(parse_state, .TOKEN_RIGHTPAREN, "Expect ')' after function call.");

    if node {
        // Peek ahead to the next sync point to find the end of the
        // range, then rewind so actual parsing is unaffected.
        snapshot := snapshot_state(parse_state);

        advance_to_sync_point(parse_state);

        source_location.end = parse_state.current;

        rewind_to_snapshot(parse_state, snapshot);

        node.source_location = source_location;

    }

    return node;
}
|
|
|
|
// Wraps a single expression followed by ';' in an .Expression_Statement
// node. Any run of redundant trailing semicolons is skipped silently.
expression_statement :: (parse_state : *Parse_State) -> *AST_Node {
    stmt := make_node(parse_state, .Expression_Statement);

    range : Source_Range;
    range.begin = parse_state.current;

    // The wrapped expression is the statement's only child.
    add_child(stmt, expression(parse_state));

    consume(parse_state, .TOKEN_SEMICOLON, "Expect ';' after expression.");

    // Swallow extra semicolons so 'x = 1;;;' parses cleanly.
    while check(parse_state, .TOKEN_SEMICOLON) {
        advance(parse_state);
    }

    range.end = parse_state.previous;
    stmt.source_location = range;

    return stmt;
}
|
|
|
|
// Parses a single statement. Handles 'return', 'if'/'else', and 'for';
// anything else falls through to expression_statement. A stray 'else'
// (without a preceding 'if') is reported, recovered from, and yields an
// error node.
statement :: (parse_state : *Parse_State) -> *AST_Node {
    if match(parse_state, .TOKEN_RETURN) {
        // 'return expr;' — the expression (if any) becomes a child.
        node := make_node(parse_state, .Return);

        source_location : Source_Range;

        source_location.begin = parse_state.previous;

        return_expression := expression(parse_state);

        consume(parse_state, .TOKEN_SEMICOLON, "Expect ';' after return statement.");

        // Swallow redundant trailing semicolons.
        while parse_state.current.kind == .TOKEN_SEMICOLON {

            advance(parse_state);

        }

        if return_expression {

            add_child(node, return_expression);

        }

        source_location.end = parse_state.previous;

        node.source_location = source_location;

        return node;

    } else if match(parse_state, .TOKEN_IF) {
        // 'if cond { ... } [else ...]' — children: condition, body,
        // and optionally the else node.
        node := make_node(parse_state, .If);

        source_location : Source_Range;

        source_location.begin = parse_state.previous;

        if_cond := expression(parse_state, "Expected if condition.");

        add_child(node, if_cond);

        // NOTE(review): end is recorded here, before the body is parsed,
        // so the range covers only the condition — confirm intended.
        source_location.end = parse_state.previous;

        advance_to_sync_point(parse_state);

        if_body := block(parse_state);

        add_child(node, if_body);

        if match(parse_state, .TOKEN_ELSE) {

            else_node := else_statement(parse_state);

            add_child(node, else_node);

        }

        node.source_location = source_location;

        return node;

    } else if match(parse_state, .TOKEN_ELSE) {
        // 'else' with no preceding 'if': report, then still parse the
        // orphaned body (if brace-enclosed) so parsing can continue.
        if check(parse_state, .TOKEN_IF) {

            else_if_without_if(parse_state);

            advance_to_sync_point(parse_state);

            if check(parse_state, .TOKEN_LEFTBRACE) {

                return block(parse_state);

            }

            return error_node(parse_state, "'else if' without 'if'.");

        } else {

            else_without_if(parse_state);

            advance_to_sync_point(parse_state);

            if check(parse_state, .TOKEN_LEFTBRACE) {

                return block(parse_state);

            }

            return error_node(parse_state, "'else' without 'if'.");

        }

    } else if match(parse_state, .TOKEN_FOR) {
        // 'for it : begin..end { ... }' — children: begin expression,
        // end expression, body block. The iterator name goes on node.token.
        if check(parse_state, .TOKEN_IDENTIFIER) {

            node := make_node(parse_state, .For);

            source_location : Source_Range;

            source_location.begin = parse_state.previous;

            loop_iterator := parse_state.current;

            node.token = loop_iterator;

            advance(parse_state);

            consume(parse_state, .TOKEN_COLON, "Expect ':' after for loop iterator.");

            // Snapshot so a malformed iterator expression can be unwound
            // and the (possibly present) body still skipped.
            snap := snapshot_state(parse_state);

            begin_iter := expression(parse_state, "Expected beginning of iterator.");

            if begin_iter.kind == .Error {

                unable_to_parse_statement(parse_state, source_location.begin);

                rewind_to_snapshot(parse_state, snap);

                if parse_state.current.kind == .TOKEN_LEFTBRACE {

                    block(parse_state);

                }

                return error_node(parse_state, "'for' without well-formed iterator expression.");

            }

            add_child(node, begin_iter);

            consume(parse_state, .TOKEN_DOTDOT, "Expect '..' after for loop iter left hand side.");

            snap = snapshot_state(parse_state);

            end_iter := expression(parse_state, "Expected end of iterator.");

            if end_iter.kind == .Error {

                unable_to_parse_statement(parse_state, source_location.begin);

                rewind_to_snapshot(parse_state, snap);

                if parse_state.current.kind == .TOKEN_LEFTBRACE {

                    block(parse_state);

                }

                return error_node(parse_state, "'for' without well-formed iterator expression.");

            }

            add_child(node, end_iter);

            if check(parse_state, .TOKEN_LEFTBRACE) {

                for_body := block(parse_state);

                add_child(node, for_body);

            } else {

                unable_to_parse_statement(parse_state, source_location.begin, "'for' currently expects a brace-enclosed block as a body.");

                return error_node(parse_state, "'for' currently expects a brace-enclosed block as a body.");

            }

            // NOTE(review): source_location.end is never assigned in this
            // branch, so the range's end stays default-initialized —
            // confirm whether it should be parse_state.previous.
            node.source_location = source_location;

            return node;

        }

    } else {

        return expression_statement(parse_state);

    }

    // Reached when 'for' was matched but no iterator identifier followed.
    return error_node(parse_state, "Couldn't parse statement.");
}
|
|
|
|
// Parses the body that follows an 'else'. Three shapes are accepted:
// 'else if ...' (handled by statement(), which recognizes TOKEN_IF),
// 'else #if ...' (a compile-time directive), and a plain '{ ... }' block.
else_statement :: (parse_state : *Parse_State) -> *AST_Node {
    // 'else if': let statement() handle the nested if.
    if check(parse_state, .TOKEN_IF)  return statement(parse_state);

    // 'else #if': a directive whose identifier spells "if".
    is_directive_if := check(parse_state, .TOKEN_DIRECTIVE) && parse_state.current.ident_value == "if";
    if is_directive_if  return directive(parse_state);

    // Plain 'else { ... }'.
    return block(parse_state);
}
|
|
|
|
// Parses a brace-enclosed block of declarations into a .Block node.
// Stops at the matching '}' (or at end of file, reporting the missing
// brace through consume()).
block :: (parse_state : *Parse_State) -> *AST_Node {
    result := make_node(parse_state, .Block);
    array_reserve(*result.children, 32);

    range : Source_Range;

    consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' at start of block.");
    range.begin = parse_state.previous;

    // Collect declarations until '}' or EOF; null declarations
    // (e.g. bare directives) are simply skipped.
    while !check(parse_state, .TOKEN_RIGHTBRACE) && !check(parse_state, .TOKEN_EOF) {
        child := declaration(parse_state);
        if child  add_child(result, child);
    }

    consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after block.");
    range.end = parse_state.previous;
    result.source_location = range;

    return result;
}
|
|
|
|
// Parses a run of field declarations separated by 'separator' into a
// .FieldList node. Parsing stops when the next token is not an
// identifier, or early when ')' is seen — so the same routine serves
// both function argument lists (Comma) and struct/buffer bodies
// (Semicolon).
//
// When require_field_names is false, a bare identifier with no ':' is
// accepted as an .Unnamed_Field (used for entry-point argument lists).
field_list :: (parse_state : *Parse_State, separator : Separator_Type, require_field_names := true) -> *AST_Node {
    node : *AST_Node = make_node(parse_state, .FieldList);
    array_reserve(*node.children, 16);

    source_location : Source_Range;
    source_location.begin = parse_state.previous;
    source_location.main_token = parse_state.current;

    while check(parse_state, .TOKEN_IDENTIFIER) {
        field : *AST_Node;
        identifier := parse_state.current;
        advance(parse_state);

        if require_field_names || check(parse_state, .TOKEN_COLON) {
            field = field_declaration(parse_state, identifier);
        } else {
            // Bare identifier: record it as an unnamed field.
            field = make_node(parse_state, .Unnamed_Field);

            field_location : Source_Range;
            field_location.begin = identifier;
            field_location.main_token = identifier;
            // An unnamed field spans exactly its identifier token.
            field_location.end = identifier;
            // Fix: this range was previously computed and then discarded;
            // attach it so unnamed fields carry a source location.
            field.source_location = field_location;

            field.name = identifier.ident_value;
            field.token = identifier;
        }

        add_child(node, field);

        // ')' ends an argument list without requiring a trailing separator.
        if check(parse_state, .TOKEN_RIGHTPAREN) {
            source_location.end = parse_state.current;
            node.source_location = source_location;
            return node;
        }

        if separator == {
            case .Comma; {
                consume(parse_state, .TOKEN_COMMA, "Expect ',' after field declaration.");
            }
            case .Semicolon; {
                consume(parse_state, .TOKEN_SEMICOLON, "Expect ';' after field declaration.");
            }
        }
    }

    // Fix: the fall-through exit (list ended without seeing ')', e.g. a
    // struct body terminated by '}') previously returned without ever
    // assigning node.source_location.
    source_location.end = parse_state.previous;
    node.source_location = source_location;

    return node;
}
|
|
|
|
// Parses a function declaration starting after '::'. identifier_token
// names the function. entry_point_kind marks vertex/pixel entry points,
// whose stored names are prefixed "vs_"/"ps_" to keep the two stages
// distinct. When expect_body is false the declaration is a prototype
// terminated by ';'. require_field_names is forwarded to field_list for
// the argument list.
function_declaration :: (parse_state : *Parse_State, identifier_token : *Token, entry_point_kind : Entry_Point_Type, expect_body : bool = true, require_field_names : bool = true) -> *AST_Node {
    node : *AST_Node;

    source_location : Source_Range;

    source_location = generate_source_location_from_token(parse_state, identifier_token);

    function_name_token := identifier_token;

    consume(parse_state, .TOKEN_LEFTPAREN, "Expect argument list after '::' in function declaration.");

    function_args := field_list(parse_state, .Comma, require_field_names);

    consume(parse_state, .TOKEN_RIGHTPAREN, "Expect right ')' after function argument list.");

    return_type_token : Token;

    hint_token : Token;

    // Optional '-> type', optionally followed by '@hint' on the return type.
    if match(parse_state, .TOKEN_ARROW) {

        return_type_token = parse_state.current;

        advance(parse_state);

        if check(parse_state, .TOKEN_AT) {

            advance(parse_state);

            hint_token = parse_state.current;

            advance(parse_state);

        }

    }

    node = make_node(parse_state, .Function);

    add_child(node, function_args);

    name := function_name_token.ident_value;

    if entry_point_kind == {

        case .Vertex; {

            node.vertex_entry_point = true;

            // Prefix the stored name so vertex/pixel stages never collide.
            name = sprint("vs_%", function_name_token.ident_value);

            // if return_type_token.kind == .TOKEN_INVALID {

            // entry_point_requires_return_value(parse_state, function_name_token);

            // advance_to_sync_point(parse_state);

            // return error_node(parse_state, "");

            // }

        }

        case .Pixel; {

            node.pixel_entry_point = true;

            name = sprint("ps_%", function_name_token.ident_value);

            // if return_type_token.kind == .TOKEN_INVALID {

            // entry_point_requires_return_value(parse_state, function_name_token);

            // advance_to_sync_point(parse_state);

            // return error_node(parse_state, "");

            // }

        }

    }

    node.name = name;

    // node.token holds the return type (default-initialized when no '->').
    node.token = return_type_token;

    // NOTE(review): hint_token is appended even when no '->' / '@' was
    // parsed, so a default-initialized Token always lands in hint_tokens —
    // confirm downstream code filters on token kind.
    array_add(*node.hint_tokens, hint_token);

    if expect_body {

        function_body := block(parse_state);

        // An empty body yields a block with no children; it is dropped.
        if function_body.children.count > 0 {

            add_child(node, function_body);

        }

    } else {

        consume(parse_state, .TOKEN_SEMICOLON, "Expect ';' after a function with no body.");

    }

    node.source_location = source_location;

    return node;
}
|
|
|
|
// Parses a 'buffer' declaration: optional '@' annotations, then a
// '{'-enclosed, semicolon-separated field list. When identifier_token
// is provided the node takes its name. The 'buffer' keyword itself was
// consumed by the caller.
buffer :: (state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
    result := make_node(state, .Buffer);

    range : Source_Range;
    range.begin = state.current;

    // Collect '@' annotations; each '@' must be followed by an identifier.
    // @Incomplete(niels): this is a mapping
    while check(state, .TOKEN_AT) {
        advance(state);
        if check(state, .TOKEN_IDENTIFIER) {
            array_add(*result.hint_tokens, state.current);
            advance(state);
        }
    }

    consume(state, .TOKEN_LEFTBRACE, "Expect '{' after 'buffer' keyword");

    fields := field_list(state, .Semicolon);
    result.array_field = true;

    if identifier_token  result.name = identifier_token.ident_value;

    add_child(result, fields);

    consume(state, .TOKEN_RIGHTBRACE, "Expect '}' after 'buffer' block");
    range.end = state.previous;
    result.source_location = range;

    return result;
}
|
|
|
|
// Parses a 'constant_buffer' declaration: optional '@' annotations, then
// a '{'-enclosed, semicolon-separated field list. When identifier_token
// is provided the node takes its name. The 'constant_buffer' keyword
// itself was consumed by the caller.
constant_buffer :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
    result := make_node(parse_state, .CBuffer);

    range : Source_Range;
    range.begin = parse_state.current;

    // Collect '@' annotations; each '@' must be followed by an identifier.
    // @Incomplete(niels): this is a mapping
    while check(parse_state, .TOKEN_AT) {
        advance(parse_state);
        if check(parse_state, .TOKEN_IDENTIFIER) {
            array_add(*result.hint_tokens, parse_state.current);
            advance(parse_state);
        }
    }

    consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'constant_buffer' keyword");

    fields := field_list(parse_state, .Semicolon);

    if identifier_token  result.name = identifier_token.ident_value;

    add_child(result, fields);

    consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after 'constant_buffer' block");
    range.end = parse_state.previous;
    result.source_location = range;

    return result;
}
|
|
|
|
// Parses the body of 'Name :: struct { ... }'. identifier_token names
// the struct; the '::' and the 'struct' keyword were already consumed
// by the caller.
struct_declaration :: (parse_state : *Parse_State, identifier_token : *Token) -> *AST_Node {
    range := generate_source_location_from_token(parse_state, identifier_token);

    consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' before struct declaration.");

    // The struct's members form a single FieldList child.
    members := field_list(parse_state, .Semicolon);

    result := make_node(parse_state, .Struct);
    result.name = identifier_token.ident_value;
    add_child(result, members);

    consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after struct declaration.");
    range.end = parse_state.previous;
    result.source_location = range;

    return result;
}
|
|
|
|
// Placeholder for accessor parsing: always reports "not yet implemented"
// and advances one token so the parser can make progress.
access :: (parse_state : *Parse_State, identifier_token : *Token) -> *AST_Node {
    message := tprint("Accessors not yet implemented. Token: %.", identifier_token.ident_value);
    result := error_node(parse_state, message);
    advance(parse_state);
    return result;
}
|
|
|
|
// Dispatches a constant declaration ('name :: ...') on the token that
// follows '::'. The order matters: each match() consumes its keyword on
// success, and every branch returns directly.
const_declaration :: (parse_state : *Parse_State, identifier_token : *Token) -> *AST_Node {
    // 'name :: struct { ... }'
    if match(parse_state, .TOKEN_STRUCT)           return struct_declaration(parse_state, identifier_token);

    // 'name :: (args) ...' — a regular (non-entry-point) function.
    if check(parse_state, .TOKEN_LEFTPAREN)        return function_declaration(parse_state, identifier_token, .None);

    // 'name :: constant_buffer { ... }'
    if match(parse_state, .TOKEN_CONSTANT_BUFFER)  return constant_buffer(parse_state, identifier_token);

    // 'name :: buffer { ... }'
    if match(parse_state, .TOKEN_BUFFER)           return buffer(parse_state, identifier_token);

    return error_node(parse_state, tprint("Couldn't parse constant declaration at token %\n", parse_state.current.*));
}
|
|
|
|
// Parses one top-level or block-level declaration and returns its node.
// May return null (e.g. for a directive that produces nothing); callers
// check for that. Trailing semicolons after the declaration are skipped.
declaration :: (parse_state : *Parse_State) -> *AST_Node {
    skip_statement := false;

    decl_node : *AST_Node;

    if match(parse_state, .TOKEN_VERTEX) {
        // 'vertex name :: (...) {...}' — vertex-stage entry point.
        vertex_token := parse_state.previous;

        identifier := parse_state.current;

        advance(parse_state);

        consume(parse_state, .TOKEN_DOUBLECOLON, "Expect '::' after vertex entry point declaration.");

        decl_node = function_declaration(parse_state, identifier, .Vertex);

    } else if match(parse_state, .TOKEN_PIXEL) {
        // 'pixel name :: (...) {...}' — pixel-stage entry point.
        pixel_token := parse_state.previous;

        identifier := parse_state.current;

        advance(parse_state);

        consume(parse_state, .TOKEN_DOUBLECOLON, "Expect '::' after pixel entry point declaration.");

        decl_node = function_declaration(parse_state, identifier, .Pixel);

    } else if check(parse_state, .TOKEN_LEFTPAREN) {
        // A bare '(' — parsed as a call with no callee identifier.
        decl_node = call(parse_state, null);

    } else if check(parse_state, .TOKEN_DIRECTIVE) {

        decl_node = directive(parse_state);

        // Directives may legitimately yield no node; don't fall back to
        // statement parsing in that case.
        skip_statement = true;

    } else if check(parse_state, .TOKEN_IDENTIFIER) {

        identifier := parse_state.current;

        // Decide by the token after the identifier.
        if check_next(parse_state, .TOKEN_DOUBLECOLON) {
            // 'name :: ...' — constant declaration (struct/function/buffer).
            advance(parse_state);

            advance(parse_state);

            decl_node = const_declaration(parse_state, identifier);

        } else if check_next(parse_state, .TOKEN_LEFTPAREN) {
            // 'name(...)' — a call expression; parse as a statement.
            decl_node = statement(parse_state);

        } else if check_next(parse_state, .TOKEN_COLON) {
            // 'name : type ...' — field declaration; field_declaration
            // consumes the ':' itself.
            advance(parse_state);

            decl_node = field_declaration(parse_state, identifier);

            consume(parse_state, .TOKEN_SEMICOLON, "Expect ';' after a field declaration.");

        } else if check_next(parse_state, .TOKEN_ASSIGN) {
            // 'name = expr;' — plain assignment statement.
            decl_node = expression_statement(parse_state);

        }

    } else if check(parse_state, .TOKEN_OUT) || check(parse_state, .TOKEN_IN) {
        // 'in'/'out' are reserved and cannot name a declaration.
        error := error_node(parse_state, tprint("Expected a declaration or function call. '%' not allowed as a declaration name.", parse_state.current.kind));

        advance(parse_state);

        decl_node = error;

    }

    // Anything not recognized above is tried as a statement.
    if !decl_node && !skip_statement {

        decl_node = statement(parse_state);

    }

    // Skip stray semicolons between declarations.
    while parse_state.current.kind == .TOKEN_SEMICOLON {

        advance(parse_state);

    }

    return decl_node;
}
|
|
|
|
// Parser entry point. Builds ctx.root (a .Program node) from the token
// stream already stored in ctx, using 'allocator' for all parse-time
// allocations. Does nothing if an earlier phase reported an error.
parse :: (ctx : *Compiler_Context, allocator := temp) {
    if ctx.had_error {

        return;

    }

    // Run the whole parse under the requested allocator.
    new_context := context;

    new_context.allocator = allocator;

    push_context new_context {

        init_context_allocators();

        defer clear_context_allocators();

        parse_state : Parse_State;

        array_reserve(*ctx.nodes, 4096);

        parse_state.current_token_index = 0;

        parse_state.ctx = ctx;

        // Prime current/previous with the first token.
        advance(*parse_state);

        if !match(*parse_state, .TOKEN_EOF) {

            parse_state.ctx.root = make_node(*parse_state, .Program);

            array_reserve(*parse_state.ctx.root.children, 1024);

            program := parse_state.ctx.root;

            // Top-level loop: collect declarations until EOF, skipping
            // any that produced no node.
            while !check(*parse_state, .TOKEN_EOF) {

                decl := declaration(*parse_state);

                if decl {

                    add_child(program, decl);

                }

            }

        }

    }
}
|
|
|
|
#load "ast.jai";
|