Broke builtins.
Showing 1 changed file: Lexing.jai (87 changed lines)
@@ -5,17 +5,11 @@ Lexer :: struct {
     current_line : int;
     current_column : int;
 
-    result : Lexing_Result;
+    result : *Compile_Result;
 
     path : string;
 }
 
-Lexing_Result :: struct {
-    tokens : [..]Token;
-    had_error : bool;
-    messages : [..]Compiler_Message;
-}
-
 Token_Kind :: enum {
     TOKEN_FLOATLITERAL;
     TOKEN_INTLITERAL;
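
Note: the Lexer now writes into a shared Compile_Result instead of owning a Lexing_Result. Compile_Result itself is not part of this diff; judging only from the fields the new code touches (allocator, environment, file, tokens, messages, had_error), its shape is presumably something like the sketch below. The Environment and Source_File types are guesses.

    // Sketch of Compile_Result, inferred from usage in this diff; not the real definition.
    Compile_Result :: struct {
        allocator   : Allocator;            // copied into the per-#load result below
        environment : *Environment;         // guessed type; only passed through here
        file        : Source_File;          // guessed type; has .path and .source per make_file usage
        tokens      : [..]Token;            // absorbs what Lexing_Result.tokens used to hold
        had_error   : bool;
        messages    : [..]Compiler_Message;
    }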
@@ -139,6 +133,8 @@ Token :: struct {
     index : int;
 
     error : string;
+
+    builtin : bool; // @Incomplete: This is kind of a bad idea, but let's just do it for now...
 }
 
 Source_Range :: struct {
@@ -272,6 +268,26 @@ error_token :: (lexer : *Lexer, message : string) -> *Token {
     return token;
 }
 
+// unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {
+//     builder : String_Builder;
+//     init_string_builder(*builder,, temp);
+
+//     print_to_builder(*builder, "Unable to open file '%' for reading\n\n", path);
+
+//     location := generate_source_location_from_token(state, token);
+
+//     indent(*builder, 1);
+//     cyan(*builder);
+//     print_to_builder(*builder, "%\n", print_from_source_location(location));
+//     indent(*builder, 1);
+
+//     loc := location.begin;
+//     print_token_pointer(*builder, loc);
+
+//     final_message := builder_to_string(*builder);
+//     record_error(state, token, final_message, false);
+// }
+
 record_error :: (lexer : *Lexer, message : string) {
     error : Compiler_Message;
     error.message_kind = .Error;
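
Note: the hunk cuts off two lines into record_error, so the rest of the body is not shown. Presumably it finishes the Compiler_Message and appends it to the shared result, along these lines (hypothetical continuation; the message field name is assumed):

    record_error :: (lexer : *Lexer, message : string) {
        error : Compiler_Message;
        error.message_kind = .Error;
        error.message      = message;              // assumed field name
        array_add(*lexer.result.messages, error);  // assumed: messages now live on Compile_Result
        lexer.result.had_error = true;
    }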
@@ -346,14 +362,41 @@ new_token :: (lexer : *Lexer, kind : Token_Kind) -> *Token {
 
 make_directive :: (lexer : *Lexer) -> *Token {
     lexer.start += 1;
-    return make_identifier(lexer, .TOKEN_DIRECTIVE);
+    ident := make_identifier(lexer, .TOKEN_DIRECTIVE);
+    if ident.ident_value == "load" {
+        path_tok := scan_next_token(lexer);
+        path := path_tok.string_value;
+        result : Compile_Result;
+        result.allocator = lexer.result.allocator;
+        result.environment = lexer.result.environment;
+
+        result.file = make_file(*result, path);
+
+        if result.file.source.count == 0 {
+            // unable_to_open_file(lexer, path, path_tok);
+            record_error(lexer, tprint("Unable to open file '%' for reading\n", path));
+            return error_token(lexer, tprint("Unable to open file '%' for reading\n", path));
+        }
+
+        lex(*result);
+
+        result.tokens.count -= 1; // @Note: remove TOKEN_EOF
+        lexer.result.tokens.count -= 2;
+        array_resize(*lexer.result.tokens, lexer.result.tokens.count + result.tokens.count);
+
+        for tok : result.tokens {
+            lexer.result.tokens[it_index] = tok;
+        }
+        return scan_next_token(lexer);
+    }
+    return ident;
 }
 
 make_string :: (lexer : *Lexer) -> *Token {
     token : *Token = new_token(lexer, .TOKEN_STRING);
 
-    name : string = .{ count = token.length,
-                       data = *lexer.input.data[lexer.start] };
+    name : string = .{ count = token.length - 2,
+                       data = *lexer.input.data[lexer.start + 1] };
     token.string_value = name;
 
     return token;
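
Note: as committed, the copy loop in make_directive writes the #load'ed tokens starting at index 0 (`lexer.result.tokens[it_index] = tok;`), clobbering the front of the token array instead of appending after the surviving tokens. A corrected splice, as a sketch against the same structures (not part of the commit), would remember the count before resizing:

    // Sketch: splice the loaded file's tokens over the two directive tokens
    // (the `#load` ident and the path string) without overwriting earlier tokens.
    splice_loaded_tokens :: (lexer : *Lexer, loaded : []Token) {
        lexer.result.tokens.count -= 2;    // drop `#load` and the path token

        base := lexer.result.tokens.count;
        array_resize(*lexer.result.tokens, base + loaded.count);

        for tok : loaded {
            lexer.result.tokens[base + it_index] = tok;   // append, don't overwrite from 0
        }
    }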
@@ -432,13 +475,13 @@ scan_next_token :: (lexer : *Lexer) -> *Token {
     if c == {
         case #char "\""; {
             c = advance(lexer);
-            lexer.start = lexer.cursor;
+            // lexer.start = lexer.cursor;
             while c != #char "\"" {
                 c = advance(lexer);
             }
-            lexer.cursor -= 1;
+            // lexer.cursor -= 1;
             tok := make_string(lexer);
-            advance(lexer);
+            // advance(lexer);
             return tok;
         }
         case #char "+"; {
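
Note: the quote handling moved out of scan_next_token and into make_string: instead of nudging lexer.start and lexer.cursor around the quotes while scanning, make_string now trims them itself with count = token.length - 2 and data = *lexer.input.data[lexer.start + 1]. A quick standalone check of that arithmetic (hypothetical values; assumes token.length spans both quotes):

    #import "Basic";

    main :: () {
        input  := "\"abc\"";   // the raw source text, quotes included
        start  := 0;           // index of the opening quote
        length := 5;           // token length, including both quotes

        // Same construction make_string uses: skip the opening quote,
        // drop two characters from the count for the two quotes.
        name : string = .{ count = length - 2,
                           data = *input.data[start + 1] };

        print("%\n", name);    // prints: abc
    }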
@@ -528,28 +571,14 @@ lex :: (result : *Compile_Result) {
     }
 
     lexer : Lexer;
+    lexer.result = result;
+
     init_lexer_from_string(*lexer, result.file.source);
     lexer.path = result.file.path;
     token : *Token = scan_next_token(*lexer);
     while token && token.kind != .TOKEN_EOF {
         token = scan_next_token(*lexer);
     }
-
-    array_copy(*result.tokens.tokens, lexer.result.tokens);
-    result.had_error |= lexer.result.had_error;
-
-    // @Incomplete(nb): Temporary until we figure out a good way of passing this stuff around
-    copy_messages(lexer.result.messages, *result.messages);
 }
 
-lex :: (lexer : *Lexer, allocator : Allocator = context.allocator) -> Lexing_Result {
-    lexer.result.tokens.allocator = allocator;
-    token : *Token = scan_next_token(lexer);
-    while token && token.kind != .TOKEN_EOF {
-        token = scan_next_token(lexer);
-    }
-
-    return lexer.result;
-}
-
 init_lexer_from_string :: (lexer : *Lexer, input : string) {
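
Note: with the Lexing_Result overload deleted, lexing has a single entry point: build a Compile_Result, let make_file attach the root source, and call lex. A minimal driver might look like this (a sketch using only names that appear in this diff; environment setup is omitted):

    compile_file :: (path : string) -> Compile_Result {
        result : Compile_Result;
        result.allocator = context.allocator;

        result.file = make_file(*result, path);
        lex(*result);   // tokens, messages, and had_error all land on result

        return result;
    }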